<html><head>
<base target="_top"/>
<title>United States Patent: 9317600</title></head>
<!-- BUF1=9317600
BUF7=2016
BUF8=89633
BUF9=/1/
BUF51=9
-->
<body bgcolor="#FFFFFF">
<a name="top"></a>
<center>
<img alt="[US Patent & Trademark Office, Patent Full Text and Image Database]" src="/netaicon/PTO/patfthdr.gif"/>
<br/>
<table>
<tbody><tr><td align="center">
<a href="/netahtml/PTO/index.html"><img alt="[Home]" border="0" src="/netaicon/PTO/home.gif" valign="middle"/></a>
<a href="/netahtml/PTO/search-bool.html"><img alt="[Boolean Search]" border="0" src="/netaicon/PTO/boolean.gif" valign="middle"/></a>
<a href="/netahtml/PTO/search-adv.htm"><img alt="[Manual Search]" border="0" src="/netaicon/PTO/manual.gif" valign="middle"/></a>
<a href="/netahtml/PTO/srchnum.htm"><img alt="[Number Search]" border="0" src="/netaicon/PTO/number.gif" valign="middle"/></a>
<a href="/netahtml/PTO/help/help.htm"><img alt="[Help]" border="0" src="/netaicon/PTO/help.gif" valign="middle"/></a>
</td></tr>
<tr><td align="center">
<a href="/netacgi/nph-Parser?Sect1=PTO2&Sect2=HITOFF&u=%2Fnetahtml%2FPTO%2Fsearch-adv.htm&r=780&f=S&l=50&d=PTXT&s1=facebook&p=15&Query=facebook"><img alt="[PREV_LIST]" border="0" src="/netaicon/PTO/prevlist.gif" valign="MIDDLE"/></a>
<a href="/netacgi/nph-Parser?Sect1=PTO2&Sect2=HITOFF&u=%2Fnetahtml%2FPTO%2Fsearch-adv.htm&r=780&f=S&l=50&d=PTXT&s1=facebook&p=16&Query=facebook"><img alt="[HIT_LIST]" border="0" src="/netaicon/PTO/hitlist.gif" valign="MIDDLE"/></a>
<a href="/netacgi/nph-Parser?Sect1=PTO2&Sect2=HITOFF&u=%2Fnetahtml%2FPTO%2Fsearch-adv.htm&r=780&f=S&l=50&d=PTXT&s1=facebook&p=17&Query=facebook"><img alt="[NEXT_LIST]" border="0" src="/netaicon/PTO/nextlist.gif" valign="MIDDLE"/></a>
<a href="/netacgi/nph-Parser?Sect1=PTO2&Sect2=HITOFF&u=%2Fnetahtml%2FPTO%2Fsearch-adv.htm&r=779&f=G&l=50&d=PTXT&s1=facebook&p=16&OS=facebook"><img alt="[PREV_DOC]" border="0" src="/netaicon/PTO/prevdoc.gif" valign="MIDDLE"/></a>
<a href="/netacgi/nph-Parser?Sect1=PTO2&Sect2=HITOFF&u=%2Fnetahtml%2FPTO%2Fsearch-adv.htm&r=781&f=G&l=50&d=PTXT&s1=facebook&p=16&OS=facebook"><img alt="[NEXT_DOC]" border="0" src="/netaicon/PTO/nextdoc.gif" valign="MIDDLE"/></a>
<a href="#bottom"><img alt="[Bottom]" border="0" src="/netaicon/PTO/bottom.gif" valign="middle"/></a>
</td></tr>
<tr><td align="center">
<a href="http://ebiz1.uspto.gov/vision-service/ShoppingCart_P/ShowShoppingCart?backUrl1=http%3A//patft.uspto.gov/netacgi/nph-Parser?Sect1%3DPTO2%26Sect2%3DHITOFF%26u%3D%25252Fnetahtml%25252FPTO%25252Fsearch-adv.htm%26r%3D780%26f%3DG%26l%3D50%26d%3DPTXT%26s1%3Dfacebook%26p%3D16%26OS%3Dfacebook&backLabel1=Back%20to%20Document%3A%209317600"><img alt="[
View Shopping Cart]" border="0" src="/netaicon/PTO/cart.gif" valign="middle"/></a>
<a href="http://ebiz1.uspto.gov/vision-service/ShoppingCart_P/AddToShoppingCart?docNumber=9317600&backUrl1=http%3A//patft.uspto.gov/netacgi/nph-Parser?Sect1%3DPTO2%26Sect2%3DHITOFF%26u%3D%25252Fnetahtml%25252FPTO%25252Fsearch-adv.htm%26r%3D780%26f%3DG%26l%3D50%26d%3DPTXT%26s1%3Dfacebook%26p%3D16%26OS%3Dfacebook&backLabel1=Back%20to%20Document%3A%209317600">
<img alt="[Add to Shopping Cart]" border="0" src="/netaicon/PTO/order.gif" valign="middle"/></a>
</td></tr>
<tr><td align="center">
<a href="http://pdfpiw.uspto.gov/.piw?Docid=09317600&homeurl=http%3A%2F%2Fpatft.uspto.gov%2Fnetacgi%2Fnph-Parser%3FSect1%3DPTO2%2526Sect2%3DHITOFF%2526u%3D%25252Fnetahtml%25252FPTO%25252Fsearch-adv.htm%2526r%3D780%2526f%3DG%2526l%3D50%2526d%3DPTXT%2526s1%3Dfacebook%2526p%3D16%2526OS%3Dfacebook%2526RS%3Dfacebook&PageNum=&Rtype=&SectionNum=&idkey=NONE&Input=View+first+page"><img alt="[Image]" border="0" src="/netaicon/PTO/image.gif" valign="middle"/></a>
</td></tr>
</tbody></table>
</center>
<table width="100%">
<tbody><tr><td align="left" width="50%"> </td>
<td align="right" valign="bottom" width="50%"><font size="-1">( <strong>780</strong></font> <font size="-2">of</font> <strong><font size="-1">7895</font></strong> <font size="-1">)</font></td></tr></tbody></table>
<hr/>
<table width="100%">
<tbody><tr> <td align="left" width="50%"><b>United States Patent </b></td>
<td align="right" width="50%"><b>9,317,600</b></td>
</tr>
<tr><td align="left" width="50%"><b>
Harris
, et al.</b>
</td>
<td align="right" width="50%"> <b>
April 19, 2016
</b></td>
</tr>
</tbody></table>
<hr/>
<font size="+1">View of a physical space augmented with social media content originating
from a geo-location of the physical space
</font><br/>
<br/><center><b>Abstract</b></center>
<p> The disclosure relates to systems and methods for augmenting a view of a
physical space of one or more geographically definable locations
("geo-locations") with social media and/or other content originating from
the one or more geo-locations. Generally speaking, the system may include
a computing device having one or more processors programmed to augment
(e.g., add to, overlay, embed, etc.) the view of the physical space of a
geo-location with social media content, thereby allowing a user to view a
physical space at a geo-location along with social media content that was
created from the geo-location.
</p>
<hr/>
<table width="100%"> <tbody><tr> <th align="left" scope="row" valign="top" width="10%">Inventors:</th> <td align="left" width="90%">
<b>Harris; Philip B.</b> (Naples, FL)<b>, Mitchell; Scott K.</b> (Naples, FL)<b>, Mulroy; Michael J.</b> (Naples, FL) </td> </tr>
<tr><th align="left" scope="row" valign="top" width="10%">Applicant: </th><td align="left" width="90%"> <table> <tbody><tr> <th align="center" scope="col">Name</th> <th align="center" scope="col">City</th> <th align="center" scope="col">State</th> <th align="center" scope="col">Country</th> <th align="center" scope="col">Type</th> </tr> <tr> <td> <b><br/>Geofeedia, Inc.</b> </td><td> <br/>Naples </td><td align="center"> <br/>FL </td><td align="center"> <br/>US </td> <td align="left"> </td>
</tr> </tbody></table>
<!-- AANM>
~AANM Geofeedia, Inc.
~AACI Naples
~AAST FL
~AACO US
</AANM -->
</td></tr>
<tr> <th align="left" scope="row" valign="top" width="10%">Assignee:</th>
<td align="left" width="90%">
<b>Geofeedia, Inc.</b>
(Naples,
FL)
<br/>
</td>
</tr>
<tr><th align="left" nowrap="" scope="row" valign="top" width="10%">Family ID:
</th><td align="left" width="90%">
<b>1000001786968
</b></td></tr>
<tr><th align="left" nowrap="" scope="row" valign="top" width="10%">Appl. No.:
</th><td align="left" width="90%">
<b>14/215,612</b></td></tr>
<tr><th align="left" scope="row" valign="top" width="10%">Filed:
</th><td align="left" width="90%">
<b>March 17, 2014</b></td></tr>
</tbody></table>
<hr/> <center><b>Prior Publication Data</b></center> <hr/> <table width="100%"> <tbody><tr><th scope="col"></th><th scope="col"></th><td></td></tr> <tr><td align="left">
</td><th align="center" scope="col"><b><u>Document Identifier</u></b></th><th align="center" scope="col"><b><u>Publication Date</u></b></th></tr><tr><td align="center"> </td><td align="center"> US 20140280278 A1</td><td align="center">Sep 18, 2014</td></tr><tr><td align="center">
</td>
</tr> </tbody></table>
<hr/> <center><b>Related U.S. Patent Documents</b></center> <hr/> <table width="100%"> <tbody><tr><th scope="col" width="7%"></th><th scope="col"></th><th scope="col"></th> <th scope="col"></th><th scope="col"></th><td></td></tr> <tr><td align="left">
</td><th align="center" scope="col"><b><u>Application Number</u></b></th><th align="center" scope="col"><b><u>Filing Date</u></b></th><th align="center" scope="col"><b><u>Patent Number</u></b></th><th align="center" scope="col"><b><u>Issue Date</u></b></th></tr><tr><td align="center"> </td><td align="center">61800951</td><td align="center">Mar 15, 2013</td><td align="center"></td><td align="center"></td></tr><tr><td align="center">
</td>
</tr> </tbody></table> <hr/>
<p> <table width="100%"> <tbody><tr><td align="left" valign="top" width="30%"><b>Current U.S. Class:</b></td> <td align="right" valign="top" width="70%"><b>1/1</b> </td></tr>
<tr><td align="left" valign="top" width="30%"><b>Current CPC Class: </b></td>
<td align="right" valign="top" width="70%">G06F 17/241 (20130101); G06F 17/3087 (20130101); G06F 17/30861 (20130101)</td></tr>
<tr><td align="left" valign="top" width="30%"><b>Current International Class: </b></td>
<td align="right" valign="top" width="70%">G06F 7/00 (20060101); G06F 17/24 (20060101); G06F 17/30 (20060101)</td></tr>
<tr><td align="left" valign="top" width="30%"><b>Field of Search: </b></td>
<td align="right" valign="top" width="70%">
;707/706,758 ;345/8
</td></tr>
</tbody></table>
</p><hr/><center><b>References Cited <a href="/netacgi/nph-Parser?Sect1=PTO2&Sect2=HITOFF&p=1&u=%2Fnetahtml%2Fsearch-adv.htm&r=0&f=S&l=50&d=PALL&Query=ref/9317600">[Referenced By]</a></b></center> <hr/>
<center><b>U.S. Patent Documents</b></center>
<table width="100%"> <tbody><tr><th scope="col" width="33%"></th> <th scope="col" width="33%"></th> <th scope="col" width="34%"></th></tr> <tr> <td align="left">
<a href="/netacgi/nph-Parser?Sect2=PTO1&Sect2=HITOFF&p=1&u=%2Fnetahtml%2FPTO%2Fsearch-bool.html&r=1&f=G&l=50&d=PALL&RefSrch=yes&Query=PN%2F6363320">6363320</a></td><td align="left">
March 2002</td><td align="left">
Chou</td></tr><tr><td align="left">
<a href="/netacgi/nph-Parser?Sect2=PTO1&Sect2=HITOFF&p=1&u=%2Fnetahtml%2FPTO%2Fsearch-bool.html&r=1&f=G&l=50&d=PALL&RefSrch=yes&Query=PN%2F7522940">7522940</a></td><td align="left">
April 2009</td><td align="left">
Jendbro et al.</td></tr><tr><td align="left">
<a href="/netacgi/nph-Parser?Sect2=PTO1&Sect2=HITOFF&p=1&u=%2Fnetahtml%2FPTO%2Fsearch-bool.html&r=1&f=G&l=50&d=PALL&RefSrch=yes&Query=PN%2F7680796">7680796</a></td><td align="left">
March 2010</td><td align="left">
Yeh</td></tr><tr><td align="left">
<a href="/netacgi/nph-Parser?Sect2=PTO1&Sect2=HITOFF&p=1&u=%2Fnetahtml%2FPTO%2Fsearch-bool.html&r=1&f=G&l=50&d=PALL&RefSrch=yes&Query=PN%2F7698336">7698336</a></td><td align="left">
April 2010</td><td align="left">
Nath</td></tr><tr><td align="left">
<a href="/netacgi/nph-Parser?Sect2=PTO1&Sect2=HITOFF&p=1&u=%2Fnetahtml%2FPTO%2Fsearch-bool.html&r=1&f=G&l=50&d=PALL&RefSrch=yes&Query=PN%2F7912451">7912451</a></td><td align="left">
March 2011</td><td align="left">
Eckhart</td></tr><tr><td align="left">
<a href="/netacgi/nph-Parser?Sect2=PTO1&Sect2=HITOFF&p=1&u=%2Fnetahtml%2FPTO%2Fsearch-bool.html&r=1&f=G&l=50&d=PALL&RefSrch=yes&Query=PN%2F7974983">7974983</a></td><td align="left">
July 2011</td><td align="left">
Goeldi</td></tr><tr><td align="left">
<a href="/netacgi/nph-Parser?Sect2=PTO1&Sect2=HITOFF&p=1&u=%2Fnetahtml%2FPTO%2Fsearch-bool.html&r=1&f=G&l=50&d=PALL&RefSrch=yes&Query=PN%2F8103741">8103741</a></td><td align="left">
January 2012</td><td align="left">
Frazier et al.</td></tr><tr><td align="left">
<a href="/netacgi/nph-Parser?Sect2=PTO1&Sect2=HITOFF&p=1&u=%2Fnetahtml%2FPTO%2Fsearch-bool.html&r=1&f=G&l=50&d=PALL&RefSrch=yes&Query=PN%2F8341223">8341223</a></td><td align="left">
December 2012</td><td align="left">
Patton et al.</td></tr><tr><td align="left">
<a href="/netacgi/nph-Parser?Sect2=PTO1&Sect2=HITOFF&p=1&u=%2Fnetahtml%2FPTO%2Fsearch-bool.html&r=1&f=G&l=50&d=PALL&RefSrch=yes&Query=PN%2F8428228">8428228</a></td><td align="left">
April 2013</td><td align="left">
Baxter, Jr.</td></tr><tr><td align="left">
<a href="/netacgi/nph-Parser?Sect2=PTO1&Sect2=HITOFF&p=1&u=%2Fnetahtml%2FPTO%2Fsearch-bool.html&r=1&f=G&l=50&d=PALL&RefSrch=yes&Query=PN%2F8484224">8484224</a></td><td align="left">
July 2013</td><td align="left">
Harris et al.</td></tr><tr><td align="left">
<a href="/netacgi/nph-Parser?Sect2=PTO1&Sect2=HITOFF&p=1&u=%2Fnetahtml%2FPTO%2Fsearch-bool.html&r=1&f=G&l=50&d=PALL&RefSrch=yes&Query=PN%2F8595317">8595317</a></td><td align="left">
November 2013</td><td align="left">
Harris et al.</td></tr><tr><td align="left">
<a href="/netacgi/nph-Parser?Sect2=PTO1&Sect2=HITOFF&p=1&u=%2Fnetahtml%2FPTO%2Fsearch-bool.html&r=1&f=G&l=50&d=PALL&RefSrch=yes&Query=PN%2F8612533">8612533</a></td><td align="left">
December 2013</td><td align="left">
Harris et al.</td></tr><tr><td align="left">
<a href="/netacgi/nph-Parser?Sect2=PTO1&Sect2=HITOFF&p=1&u=%2Fnetahtml%2FPTO%2Fsearch-bool.html&r=1&f=G&l=50&d=PALL&RefSrch=yes&Query=PN%2F8639767">8639767</a></td><td align="left">
January 2014</td><td align="left">
Harris et al.</td></tr><tr><td align="left">
<a href="/netacgi/nph-Parser?Sect2=PTO1&Sect2=HITOFF&p=1&u=%2Fnetahtml%2FPTO%2Fsearch-bool.html&r=1&f=G&l=50&d=PALL&RefSrch=yes&Query=PN%2F8655873">8655873</a></td><td align="left">
February 2014</td><td align="left">
Mitchell et al.</td></tr><tr><td align="left">
<a href="/netacgi/nph-Parser?Sect2=PTO1&Sect2=HITOFF&p=1&u=%2Fnetahtml%2FPTO%2Fsearch-bool.html&r=1&f=G&l=50&d=PALL&RefSrch=yes&Query=PN%2F8655983">8655983</a></td><td align="left">
February 2014</td><td align="left">
Harris et al.</td></tr><tr><td align="left">
<a href="/netacgi/nph-Parser?Sect2=PTO1&Sect2=HITOFF&p=1&u=%2Fnetahtml%2FPTO%2Fsearch-bool.html&r=1&f=G&l=50&d=PALL&RefSrch=yes&Query=PN%2F8812951">8812951</a></td><td align="left">
August 2014</td><td align="left">
White</td></tr><tr><td align="left">
<a href="/netacgi/nph-Parser?Sect2=PTO1&Sect2=HITOFF&p=1&u=%2Fnetahtml%2FPTO%2Fsearch-bool.html&r=1&f=G&l=50&d=PALL&RefSrch=yes&Query=PN%2F8843515">8843515</a></td><td align="left">
September 2014</td><td align="left">
Burris</td></tr><tr><td align="left">
<a href="/netacgi/nph-Parser?Sect2=PTO1&Sect2=HITOFF&p=1&u=%2Fnetahtml%2FPTO%2Fsearch-bool.html&r=1&f=G&l=50&d=PALL&RefSrch=yes&Query=PN%2F8849935">8849935</a></td><td align="left">
September 2014</td><td align="left">
Harris et al.</td></tr><tr><td align="left">
<a href="/netacgi/nph-Parser?Sect2=PTO1&Sect2=HITOFF&p=1&u=%2Fnetahtml%2FPTO%2Fsearch-bool.html&r=1&f=G&l=50&d=PALL&RefSrch=yes&Query=PN%2F8850531">8850531</a></td><td align="left">
September 2014</td><td align="left">
Harris et al.</td></tr><tr><td align="left">
<a href="/netacgi/nph-Parser?Sect2=PTO1&Sect2=HITOFF&p=1&u=%2Fnetahtml%2FPTO%2Fsearch-bool.html&r=1&f=G&l=50&d=PALL&RefSrch=yes&Query=PN%2F8862589">8862589</a></td><td align="left">
October 2014</td><td align="left">
Harris et al.</td></tr><tr><td align="left">
<a href="/netacgi/nph-Parser?Sect2=PTO1&Sect2=HITOFF&p=1&u=%2Fnetahtml%2FPTO%2Fsearch-bool.html&r=1&f=G&l=50&d=PALL&RefSrch=yes&Query=PN%2F9055074">9055074</a></td><td align="left">
June 2015</td><td align="left">
Harris</td></tr><tr><td align="left">
<a href="/netacgi/nph-Parser?Sect2=PTO1&Sect2=HITOFF&p=1&u=%2Fnetahtml%2FPTO%2Fsearch-bool.html&r=1&f=G&l=50&d=PALL&RefSrch=yes&Query=PN%2F9077675">9077675</a></td><td align="left">
July 2015</td><td align="left">
Harris</td></tr><tr><td align="left">
<a href="/netacgi/nph-Parser?Sect2=PTO1&Sect2=HITOFF&p=1&u=%2Fnetahtml%2FPTO%2Fsearch-bool.html&r=1&f=G&l=50&d=PALL&RefSrch=yes&Query=PN%2F9077782">9077782</a></td><td align="left">
July 2015</td><td align="left">
Harris</td></tr><tr><td align="left">
<a href="/netacgi/nph-Parser?Sect2=PTO1&Sect2=HITOFF&p=1&u=%2Fnetahtml%2FPTO%2Fsearch-bool.html&r=1&f=G&l=50&d=PALL&RefSrch=yes&Query=PN%2F9258373">9258373</a></td><td align="left">
February 2016</td><td align="left">
Harris</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20020029226&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2002/0029226</a></td><td align="left">
March 2002</td><td align="left">
Li et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20020029384&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2002/0029384</a></td><td align="left">
March 2002</td><td align="left">
Griggs</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20020188669&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2002/0188669</a></td><td align="left">
December 2002</td><td align="left">
Levine</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20030040971&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2003/0040971</a></td><td align="left">
February 2003</td><td align="left">
Freedenberg</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20030088609&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2003/0088609</a></td><td align="left">
May 2003</td><td align="left">
Guedalia et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20040203854&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2004/0203854</a></td><td align="left">
October 2004</td><td align="left">
Nowak</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20040205585&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2004/0205585</a></td><td align="left">
October 2004</td><td align="left">
McConnell</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20040225635&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2004/0225635</a></td><td align="left">
November 2004</td><td align="left">
Toyama et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20050034074&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2005/0034074</a></td><td align="left">
February 2005</td><td align="left">
Munson et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20060002317&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2006/0002317</a></td><td align="left">
January 2006</td><td align="left">
Punaganti Venkata</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20060184968&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2006/0184968</a></td><td align="left">
August 2006</td><td align="left">
Clayton et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20060200305&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2006/0200305</a></td><td align="left">
September 2006</td><td align="left">
Sheha et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20070112729&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2007/0112729</a></td><td align="left">
May 2007</td><td align="left">
Wiseman et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20070121843&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2007/0121843</a></td><td align="left">
May 2007</td><td align="left">
Atazky et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20070143345&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2007/0143345</a></td><td align="left">
June 2007</td><td align="left">
Jones et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20070210937&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2007/0210937</a></td><td align="left">
September 2007</td><td align="left">
Smith et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20070276919&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2007/0276919</a></td><td align="left">
November 2007</td><td align="left">
Buchmann et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20070294299&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2007/0294299</a></td><td align="left">
December 2007</td><td align="left">
Goldstein</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20080092054&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2008/0092054</a></td><td align="left">
April 2008</td><td align="left">
Bhumkar et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20080104019&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2008/0104019</a></td><td align="left">
May 2008</td><td align="left">
Nath</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20080147674&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2008/0147674</a></td><td align="left">
June 2008</td><td align="left">
Nandiwada</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20080189099&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2008/0189099</a></td><td align="left">
August 2008</td><td align="left">
Friedman</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20080192934&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2008/0192934</a></td><td align="left">
August 2008</td><td align="left">
Nelger et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20080250031&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2008/0250031</a></td><td align="left">
October 2008</td><td align="left">
Ting et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20090005968&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2009/0005968</a></td><td align="left">
January 2009</td><td align="left">
Vengroff et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20090102859&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2009/0102859</a></td><td align="left">
April 2009</td><td align="left">
Athsani et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20090132435&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2009/0132435</a></td><td align="left">
May 2009</td><td align="left">
Titus et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20090138497&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2009/0138497</a></td><td align="left">
May 2009</td><td align="left">
Zavoli et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20090210426&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2009/0210426</a></td><td align="left">
August 2009</td><td align="left">
Kulakov</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20090217232&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2009/0217232</a></td><td align="left">
August 2009</td><td align="left">
Beerel</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20090297118&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2009/0297118</a></td><td align="left">
December 2009</td><td align="left">
Fink</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20090300528&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2009/0300528</a></td><td align="left">
December 2009</td><td align="left">
Stambaugh</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20100010907&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2010/0010907</a></td><td align="left">
January 2010</td><td align="left">
Dasgupta</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20100030648&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2010/0030648</a></td><td align="left">
February 2010</td><td align="left">
Manolescu</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20100076968&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2010/0076968</a></td><td align="left">
March 2010</td><td align="left">
Boyns et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20100079338&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2010/0079338</a></td><td align="left">
April 2010</td><td align="left">
Wooden</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20100145947&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2010/0145947</a></td><td align="left">
June 2010</td><td align="left">
Kolman et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20100149399&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2010/0149399</a></td><td align="left">
June 2010</td><td align="left">
Mukai et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20100153386&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2010/0153386</a></td><td align="left">
June 2010</td><td align="left">
Tysowski</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20100153410&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2010/0153410</a></td><td align="left">
June 2010</td><td align="left">
Jin et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20100177120&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2010/0177120</a></td><td align="left">
July 2010</td><td align="left">
Balfour</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20100180001&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2010/0180001</a></td><td align="left">
July 2010</td><td align="left">
Hardt</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20110007941&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2011/0007941</a></td><td align="left">
January 2011</td><td align="left">
Chen et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20110010674&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2011/0010674</a></td><td align="left">
January 2011</td><td align="left">
Knize et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20110035284&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2011/0035284</a></td><td align="left">
February 2011</td><td align="left">
Moshfeghi</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20110040894&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2011/0040894</a></td><td align="left">
February 2011</td><td align="left">
Shrum</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20110055176&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2011/0055176</a></td><td align="left">
March 2011</td><td align="left">
Choi</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20110072106&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2011/0072106</a></td><td align="left">
March 2011</td><td align="left">
Hoffert</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20110072114&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2011/0072114</a></td><td align="left">
March 2011</td><td align="left">
Hoffert</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20110078584&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2011/0078584</a></td><td align="left">
March 2011</td><td align="left">
Winterstein et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20110083013&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2011/0083013</a></td><td align="left">
April 2011</td><td align="left">
Nice et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20110113096&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2011/0113096</a></td><td align="left">
May 2011</td><td align="left">
Long et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20110123066&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2011/0123066</a></td><td align="left">
May 2011</td><td align="left">
Chen et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20110131496&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2011/0131496</a></td><td align="left">
June 2011</td><td align="left">
Abram et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20110137561&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2011/0137561</a></td><td align="left">
June 2011</td><td align="left">
Kankainen</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20110142347&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2011/0142347</a></td><td align="left">
June 2011</td><td align="left">
Chen et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20110153368&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2011/0153368</a></td><td align="left">
June 2011</td><td align="left">
Pierre et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20110227699&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2011/0227699</a></td><td align="left">
September 2011</td><td align="left">
Seth et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20110270940&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2011/0270940</a></td><td align="left">
November 2011</td><td align="left">
Johnson et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20110288917&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2011/0288917</a></td><td align="left">
November 2011</td><td align="left">
Wanek et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20110307307&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2011/0307307</a></td><td align="left">
December 2011</td><td align="left">
Benmbarek</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20120001938&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2012/0001938</a></td><td align="left">
January 2012</td><td align="left">
Sandberg</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20120047219&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2012/0047219</a></td><td align="left">
February 2012</td><td align="left">
Feng et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20120078503&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2012/0078503</a></td><td align="left">
March 2012</td><td align="left">
Dzubay et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20120084323&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2012/0084323</a></td><td align="left">
April 2012</td><td align="left">
Epshtein et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20120101880&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2012/0101880</a></td><td align="left">
April 2012</td><td align="left">
Alexander</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20120124161&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2012/0124161</a></td><td align="left">
May 2012</td><td align="left">
Tidwell</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20120150901&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2012/0150901</a></td><td align="left">
June 2012</td><td align="left">
Johnson et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20120158536&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2012/0158536</a></td><td align="left">
June 2012</td><td align="left">
Gratton</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20120166367&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2012/0166367</a></td><td align="left">
June 2012</td><td align="left">
Murdock et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20120212398&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2012/0212398</a></td><td align="left">
August 2012</td><td align="left">
Border et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20120221687&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2012/0221687</a></td><td align="left">
August 2012</td><td align="left">
Hunter et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20120232939&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2012/0232939</a></td><td align="left">
September 2012</td><td align="left">
Pierre et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20120239763&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2012/0239763</a></td><td align="left">
September 2012</td><td align="left">
Musil</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20120254774&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2012/0254774</a></td><td align="left">
October 2012</td><td align="left">
Patton</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20120259791&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2012/0259791</a></td><td align="left">
October 2012</td><td align="left">
Zoidze</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20120276848&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2012/0276848</a></td><td align="left">
November 2012</td><td align="left">
Krattiger et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20120276918&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2012/0276918</a></td><td align="left">
November 2012</td><td align="left">
Krattiger et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20120323687&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2012/0323687</a></td><td align="left">
December 2012</td><td align="left">
Schuster et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20120330959&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2012/0330959</a></td><td align="left">
December 2012</td><td align="left">
Kretz et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20130013713&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2013/0013713</a></td><td align="left">
January 2013</td><td align="left">
Shoham</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20130018957&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2013/0018957</a></td><td align="left">
January 2013</td><td align="left">
Parnaby et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20130051611&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2013/0051611</a></td><td align="left">
February 2013</td><td align="left">
Hicks</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20130054672&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2013/0054672</a></td><td align="left">
February 2013</td><td align="left">
Stilling</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20130060796&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2013/0060796</a></td><td align="left">
March 2013</td><td align="left">
Gilg</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20130073388&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2013/0073388</a></td><td align="left">
March 2013</td><td align="left">
Heath</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20130073389&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2013/0073389</a></td><td align="left">
March 2013</td><td align="left">
Heath</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20130073631&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2013/0073631</a></td><td align="left">
March 2013</td><td align="left">
Patton et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20130110631&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2013/0110631</a></td><td align="left">
May 2013</td><td align="left">
Mitchell et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20130110641&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2013/0110641</a></td><td align="left">
May 2013</td><td align="left">
Ormont</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20130132194&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2013/0132194</a></td><td align="left">
May 2013</td><td align="left">
Rajaram</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20130150015&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2013/0150015</a></td><td align="left">
June 2013</td><td align="left">
Valko et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20130159463&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2013/0159463</a></td><td align="left">
June 2013</td><td align="left">
Bentley et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20130201182&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2013/0201182</a></td><td align="left">
August 2013</td><td align="left">
Kuroki et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20130238599&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2013/0238599</a></td><td align="left">
September 2013</td><td align="left">
Burris</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20130238652&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2013/0238652</a></td><td align="left">
September 2013</td><td align="left">
Burris</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20130238658&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2013/0238658</a></td><td align="left">
September 2013</td><td align="left">
Burris</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20130268558&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2013/0268558</a></td><td align="left">
October 2013</td><td align="left">
Burris</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20130346563&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2013/0346563</a></td><td align="left">
December 2013</td><td align="left">
Huang</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20140025911&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2014/0025911</a></td><td align="left">
January 2014</td><td align="left">
Sims</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20140040371&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2014/0040371</a></td><td align="left">
February 2014</td><td align="left">
Gurevich</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20140089296&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2014/0089296</a></td><td align="left">
March 2014</td><td align="left">
Burris</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20140089343&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2014/0089343</a></td><td align="left">
March 2014</td><td align="left">
Burris et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20140089461&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2014/0089461</a></td><td align="left">
March 2014</td><td align="left">
Harris et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20140095509&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2014/0095509</a></td><td align="left">
April 2014</td><td align="left">
Patton</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20140164368&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2014/0164368</a></td><td align="left">
June 2014</td><td align="left">
Mitchell et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20140195918&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2014/0195918</a></td><td align="left">
July 2014</td><td align="left">
Friedlander</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20140207893&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2014/0207893</a></td><td align="left">
July 2014</td><td align="left">
Harris et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20140222950&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2014/0222950</a></td><td align="left">
August 2014</td><td align="left">
Rabel</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20140256355&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2014/0256355</a></td><td align="left">
September 2014</td><td align="left">
Harris et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20140258451&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2014/0258451</a></td><td align="left">
September 2014</td><td align="left">
Harris et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20140259113&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2014/0259113</a></td><td align="left">
September 2014</td><td align="left">
Harris et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20140274148&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2014/0274148</a></td><td align="left">
September 2014</td><td align="left">
Harris et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20140280103&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2014/0280103</a></td><td align="left">
September 2014</td><td align="left">
Harris et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20140280569&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2014/0280569</a></td><td align="left">
September 2014</td><td align="left">
Harris et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20140297740&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2014/0297740</a></td><td align="left">
October 2014</td><td align="left">
Narayanan</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20150019648&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2015/0019648</a></td><td align="left">
January 2015</td><td align="left">
Harris et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20150019866&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2015/0019866</a></td><td align="left">
January 2015</td><td align="left">
Braness</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20150020208&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2015/0020208</a></td><td align="left">
January 2015</td><td align="left">
Harris et al.</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20150172396&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2015/0172396</a></td><td align="left">
June 2015</td><td align="left">
Longo</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20150256632&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2015/0256632</a></td><td align="left">
September 2015</td><td align="left">
Harris</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20150381380&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2015/0381380</a></td><td align="left">
December 2015</td><td align="left">
Harris</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20160006783&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2016/0006783</a></td><td align="left">
January 2016</td><td align="left">
Harris</td></tr><tr><td align="left">
<a href="http://appft.uspto.gov/netacgi/nph-Parser?TERM1=20160014219&Sect1=PTO1&Sect2=HITOFF&d=PG01&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.html&r=0&f=S&l=50" target="_blank">2016/0014219</a></td><td align="left">
January 2016</td><td align="left">
Harris</td></tr><tr><td align="left">
</td>
</tr> </tbody></table>
<center><b>Foreign Patent Documents</b></center>
<table width="100%"> <tbody><tr><td></td><th scope="col"></th> <td></td><th scope="col"></th> <td></td><th scope="col"></th></tr> <tr> <td align="left">
</td><td align="left">1 045 345</td><td></td><td align="left">
Oct 2000</td><td></td><td align="left">
EP</td></tr><tr><td align="left">
</td><td align="left">2187594</td><td></td><td align="left">
May 2010</td><td></td><td align="left">
EP</td></tr><tr><td align="left">
</td><td align="left">2293566</td><td></td><td align="left">
Mar 2011</td><td></td><td align="left">
EP</td></tr><tr><td align="left">
</td><td align="left">WO 99/15995</td><td></td><td align="left">
Apr 1999</td><td></td><td align="left">
WO</td></tr><tr><td align="left">
</td><td align="left">WO 2010/049918</td><td></td><td align="left">
May 2010</td><td></td><td align="left">
WO</td></tr><tr><td align="left">
</td><td align="left">WO 2013/133870</td><td></td><td align="left">
Sep 2013</td><td></td><td align="left">
WO</td></tr><tr><td align="left">
</td><td align="left">WO 2013/134451</td><td></td><td align="left">
Sep 2013</td><td></td><td align="left">
WO</td></tr><tr><td align="left">
</td>
</tr> </tbody></table>
<table width="90%"> <tbody><tr><td align="left"><br/>U.S. Appl. No. 13/284,455, a non-final Office Action, mailed Jan. 7, 2013, 18 pages. cited by applicant
.<br/>U.S. Appl. No. 13/619,888, a non-final Office Action, mailed Mar. 1, 2013, 15 pages. cited by applicant
.<br/>U.S. Appl. No. 13/708,466, a non-final Office Action, mailed Apr. 17, 2013, 15 pages. cited by applicant
.<br/>U.S. Appl. No. 13/708,516, a non-final Office Action, mailed May 15, 2013, 11 pages. cited by applicant
.<br/>U.S. Appl. No. 13/708,404, a Notice of Allowance, mailed May 24, 2013, 12 pages. cited by applicant
.<br/>U.S. Appl. No. 13/284,455, a non-final Office Action, mailed Jun. 4, 2013, 28 pages. cited by applicant
.<br/>U.S. Appl. No. 13/708,516, a Notice of Allowance, mailed Jun. 7, 2013, 14 pages. cited by applicant
.<br/>U.S. Appl. No. 13/619,888, a Notice of Allowance, mailed Jul. 9, 2013, 10 pages. cited by applicant
.<br/>U.S. Appl. No. 13/788,760, a Notice of Allowance, mailed Jul. 26, 2013, 12 pages. cited by applicant
.<br/>U.S. Appl. No. 13/788,843, a non-final Office Action, mailed Aug. 5, 2013, 17 pages. cited by applicant
.<br/>U.S. Appl. No. 13/788,909, a non-final Office Action, mailed Aug. 12, 2013, 17 pages. cited by applicant
.<br/>U.S. Appl. No. 13/843,949, a non-final Office Action, mailed Aug. 29, 2013, 12 pages. cited by applicant
.<br/>U.S. Appl. No. 13/708,466, a Notice of Allowance, mailed Sep. 3, 2013, 11 pages. cited by applicant
.<br/>U.S. Appl. No. 13/843,832, a non-final Office Action, mailed Sep. 13, 2013, 12 pages. cited by applicant
.<br/>U.S. Appl. No. 13/284,455, a Notice of Allowance, mailed Oct. 4, 2013, 17 pages. cited by applicant
.<br/>U.S. Appl. No. 13/788,843, a final Office Action, mailed Jan. 21, 2014, 25 pages. cited by applicant
.<br/>U.S. Appl. No. 13/843,832, a Notice of Allowance, mailed Jan. 24, 2014, 6 pages. cited by applicant
.<br/>U.S. Appl. No. 13/788,909, a Notice of Allowance, mailed Jan. 24, 2014, 12 pages. cited by applicant
.<br/>U.S. Appl. No. 13/843,949, a Notice of Allowance, mailed Feb. 3, 2014, 11 pages. cited by applicant
.<br/>U.S. Appl. No. 13/843,949, a Notice of Allowance, mailed May 9, 2014, 10 pages. cited by applicant
.<br/>U.S. Appl. No. 13/843,832, a Notice of Allowance, mailed May 20, 2014, 7 pages. cited by applicant
.<br/>U.S. Appl. No. 13/788,909, a Notice of Allowance, mailed Jun. 24, 2014, 11 pages. cited by applicant
.<br/>U.S. Appl. No. 14/089,631, a non-final Office Action, mailed Jul. 8, 2014, 21 pages. cited by applicant
.<br/>U.S. Appl. No. 14/180,473, a non-final Office Action, mailed Jul. 8, 2014, 18 pages. cited by applicant
.<br/>U.S. Appl. No. 14/108,301, a non-final Office Action, mailed Sep. 11, 2014, 10 pages. cited by applicant
.<br/>Chow et al., "Towards Location-Based Social Networking Services", LBSN 2010 Proceedings of the 2nd ACM SIGSPATIAL International Workshop on Location Based Social Networks, Nov. 2, 2010, pp. 31-38. cited by applicant
.<br/>Bao, Jie, et al., "GeoFeed: A Location-Aware News Feed System", IEEE Xplore® Digital Library, Published in 2012 IEEE 28th International Conference on Data Engineering, Apr. 1-5, 2012, 14 pages. cited by applicant
.<br/>Sarwat, Mohamed, et al., "Sindbad: A Location-Based Social Networking System", SIGMOD '12, Scottsdale, Arizona, May 20-24, 2012, 4 pages. cited by applicant
.<br/>Amitay et al., "Web-a-Where: Geotagging Web Content", Proceedings of the 27th Annual International ACM SIGIR Conference on Research and Development in Information Retrieval (SIGIR), 2004, pp. 273-280. cited by applicant
.<br/>U.S. Appl. No. 14/164,362, a non-final Office Action, mailed Oct. 23, 2014, 15 pages. cited by applicant
.<br/>U.S. Appl. No. 14/180,845, a non-final Office Action, mailed Oct. 23, 2014, 32 pages. cited by applicant
.<br/>U.S. Appl. No. 14/089,631, a final Office Action, mailed Jan. 2, 2015, 8 pages. cited by applicant
.<br/>U.S. Appl. No. 14/180,473, a final Office Action, mailed Jan. 5, 2015, 7 pages. cited by applicant
.<br/>U.S. Appl. No. 14/180,473, a Notice of Allowance, mailed Jan. 27, 2015, 8 pages. cited by applicant
.<br/>U.S. Appl. No. 14/512,293, a non-final Office Action, mailed Jan. 28, 2015, 18 pages. cited by applicant
.<br/>Lee et al., "Tag-Geotag Correlation in Social Networks", Proceedings of the 2008 ACM Workshop on Search in Social Media, 2008, pp. 59-66. cited by applicant
.<br/>U.S. Appl. No. 14/089,631, a Notice of Allowance, mailed Feb. 2, 2015, 10 pages. cited by applicant
.<br/>U.S. Appl. No. 13/788,843, a non-final Office Action, mailed Feb. 20, 2015, 26 pages. cited by applicant
.<br/>U.S. Appl. No. 14/108,301, a Notice of Allowance, mailed Feb. 20, 2015, 13 pages. cited by applicant
.<br/>U.S. Appl. No. 14/164,362, a Notice of Allowance, mailed Feb. 24, 2015, 22 pages. cited by applicant
.<br/>U.S. Appl. No. 14/180,845, a final Office Action, mailed Feb. 25, 2015, 32 pages. cited by applicant
.<br/>U.S. Appl. No. 14/500,881, a non-final Office Action, mailed Sep. 21, 2015, 5 pages. cited by applicant
.<br/>U.S. Appl. No. 14/180,845, a non-final Office Action, mailed Aug. 27, 2015, 43 pages. cited by applicant
.<br/>U.S. Appl. No. 14/512,293, a final Office Action, mailed Aug. 14, 2015, 15 pages. cited by applicant
.<br/>U.S. Appl. No. 14/666,056, a non-final Office Action, mailed Aug. 10, 2015, 17 pages. cited by applicant
.<br/>U.S. Appl. No. 13/788,843, a Notice of Allowance, mailed Dec. 3, 2015, 18 pages. cited by applicant
.<br/>U.S. Appl. No. 14/500,881, a non-final Office Action, mailed Dec. 21, 2015, 24 pages. cited by applicant
.<br/>U.S. Appl. No. 14/512,293, a non-final Office Action, mailed Dec. 9, 2015, 14 pages. cited by applicant
.<br/>U.S. Appl. No. 14/666,056, a Final Office Action, mailed Jan. 4, 2016, 11 pages. cited by applicant
.<br/>U.S. Appl. No. 14/813,031, a non-final Office Action, mailed Nov. 24, 2015, 23 pages. cited by applicant
.<br/>U.S. Appl. No. 14/813,039, a non-final Office Action, mailed Jan. 20, 2016, 20 pages. cited by applicant
.<br/>U.S. Appl. No. 14/180,845, a final Office Action issued by Examiner Augustine Kunle Obisesan, mailed Feb. 22, 2016, 43 pages. cited by applicant. </td></tr> </tbody></table><br/><center><b>Other References</b></center> <br/>
<i>Primary Examiner:</i> Somers; Marc
<br/>
<i>Attorney, Agent or Firm:</i> Sheppard Mullin Richter &amp; Hampton LLP
<br/>
<hr/>
<center><b><i>Parent Case Text</i></b></center>
<hr/>
<br/><br/>RELATED APPLICATIONS
<br/><br/> This application claims priority to U.S. Provisional Patent Application
No. 61/800,951, filed Mar. 15, 2013, which is incorporated by reference
herein in its entirety.
<hr/>
<center><b><i>Claims</i></b></center> <hr/> <br/><br/>What is claimed is: <br/><br/> 1. A system for augmenting a view of a physical space at a geographically definable location with social media content originating from the geographically definable
location, wherein the augmented view includes an augmented space that is coincident with the physical space, the system comprising: one or more processors programmed by one or more computer program instructions that, when executed by the one or more
processors, cause the one or more processors to: obtain (i) a real-time or stored image of a physical space corresponding to a geographically definable location and (ii) reality contextual information that describes a context of the physical space,
wherein the one or more processors are programmed to obtain the reality contextual information from one or more sensors that provide one or more sensor measurements with respect to the physical space, and wherein the one or more sensor measurements
relate to an orientation of a first device that includes the one or more sensors, an ambient condition around the first device, and/or an altitude of the first device; identify the geographically definable location associated with the physical space;
generate a first request to be provided to a first social media content provider, wherein the first request formats the geographically definable location in a first format specific to the first social media content provider; generate a second request to
be provided to a second social media content provider, wherein the second request formats the geographically definable location in a second format specific to the second social media content provider, the second format being different than the first
format; obtain at least a first social media content item relevant to the geographically definable location based on the first formatted request, and first content contextual information that describes a context in which the first social media content
item was created; obtain at least a second social media content item relevant to the geographically definable location based on the second formatted request, and second content contextual information that describes a context in which the second social
media content item was created; compare the reality contextual information with the first content contextual information; determine whether there exists a first match between at least some of the reality contextual information and the first content
contextual information, wherein the first match indicates a match between the context of the physical space and the context in which the first social media content item was created; compare the reality contextual information with the second content
contextual information; determine whether there exists a second match between at least some of the reality contextual information and the second content contextual information, wherein the second match indicates a match between the context of the
physical space and the context in which the second social media content item was created; cause a first graphical representation of the first social media content item to be included in the augmented space responsive to a determination that there exists
the first match between at least some of the reality contextual information and the first content contextual information, wherein the augmented space is coincident with the physical space such that the view is augmented with the augmented space; cause a
second graphical representation of the second social media content item to be included in the augmented space responsive to a determination that there exists the second match between at least some of the reality contextual information and the second
content contextual information; cause the augmented view to be provided to the first device; obtain a second orientation of a second device used to create the first social media content item based on the first content contextual information; determine
that the second orientation is different than the orientation of the first device such that the first graphical representation of the first social media content item is not in the augmented view; and cause an indication to be provided on the augmented view
associated with the physical space that indicates a direction in which to move the first device to match the second orientation to bring the first graphical representation of the first social media content item into the augmented view.
<br/><br/> 2. The system of claim 1, wherein the one or more sensor measurements relate to an altitude of a device that includes the one or more sensors, and wherein the one or more processors are programmed to: determine that the first social media
content item was created at an altitude that is the same as the altitude of the device.
<br/><br/> 3. The system of claim 1, wherein the one or more sensor measurements relate to an ambient condition of a device that includes the one or more sensors, and wherein the one or more processors are programmed to: determine that the first social
media content item was created at an ambient condition that is the same as the ambient condition of the device.
<br/><br/> 4. The system of claim 1, wherein the one or more sensor measurements relate to an altitude of a device that includes the one or more sensors and an ambient condition related to the device, and wherein the one or more processors are programmed
to: determine that the first social media content item was created at an altitude that is the same as the altitude of the device; and determine that the first social media content item was created at an ambient condition that is the same as the ambient
condition of the device.
<br/><br/> 5. The system of claim 1, wherein the image of the physical space comprises a stored image.
<br/><br/> 6. The system of claim 1, wherein the one or more processors are further programmed to: identify a structure within the physical space; obtain altitude information of the structure based on the reality contextual information; identify at
least one floor of the structure based on the altitude information, wherein the first content contextual information indicates that the first social media content item was created from the at least one floor of the structure; and wherein to cause the
first graphical representation of the first social media content item to be included in the augmented space, the one or more processors are further programmed to cause the first graphical representation of the first social media content item to
be included in the augmented space in association with the at least one floor of the structure in the augmented view.
<br/><br/> 7. The system of claim 1, wherein the one or more processors are further programmed to: receive one or more parameters used to filter in or out social media content items, wherein the first formatted request specifies the one or more
parameters.
<br/><br/> 8. A method for augmenting a view of a physical space at a geographically definable location with social media content originating from the geographically definable location, wherein the augmented view includes an augmented space that is
coincident with the physical space, the method being implemented on a computer having one or more processors programmed with one or more computer program instructions that, when executed by the one or more processors, cause the computer to perform the
method, the method comprising: obtaining, by the computer, (i) a real-time or stored image of a physical space corresponding to a geographically definable location, and (ii) reality contextual information that describes a context of the physical space,
wherein the reality contextual information is obtained from one or more sensors that provide one or more sensor measurements with respect to the physical space, and wherein the one or more sensor measurements relate to an orientation of a first device
that includes the one or more sensors, an ambient condition around the first device, and/or an altitude of the first device; identifying, by the computer, the geographically definable location associated with the physical space; generating, by the
computer, a first request to be provided to a first social media content provider, wherein the first request formats the geographically definable location in a first format specific to the first social media content provider; generating, by the
computer, a second request to be provided to a second social media content provider, wherein the second request formats the geographically definable location in a second format specific to the second social media content provider, the second format being
different than the first format; obtaining, by the computer, at least a first social media content item relevant to the geographically definable location based on the first formatted request, and first content contextual information that describes a
context in which the first social media content item was created; obtaining, by the computer, at least a second social media content item relevant to the geographically definable location based on the second formatted request, and second content
contextual information that describes a context in which the second social media content item was created; comparing, by the computer, the reality contextual information with the first content contextual information; determining, by the computer,
whether there exists a first match between at least some of the reality contextual information and the first content contextual information, wherein the first match indicates a match between the context of the physical space and the context in which the
first social media content item was created; comparing, by the computer, the reality contextual information with the second content contextual information; determining, by the computer, whether there exists a second match between at least some of the
reality contextual information and the second content contextual information, wherein the second match indicates a match between the context of the physical space and the context in which the second social media content item was created; causing, by the
computer, a first graphical representation of the first social media content item to be included in the augmented space responsive to a determination that there exists the first match between at least some of the reality contextual information and the
first content contextual information, wherein the augmented space is coincident with the physical space such that the view is augmented with the augmented space; causing, by the computer, a second graphical representation of the second social media
content item to be included in the augmented space responsive to a determination that there exists the second match between at least some of the reality contextual information and the second content contextual information; causing, by the computer, the
augmented view to be provided to the first device; obtaining, by the computer, a second orientation of a second device used to create the first social media content item based on the first content contextual information; determining, by the computer,
that the second orientation is different than the orientation of the first device such that the first graphical representation of the first social media content item is not in the augmented view; and causing, by the computer, an indication to be provided on
the augmented view that indicates a direction in which to move the first device to match the second orientation to bring the first graphical representation of the first social media content item into the augmented view.
<br/><br/> 9. The method of claim 8, wherein the one or more sensor measurements relate to an altitude of a device that includes the one or more sensors, and wherein the method further comprises: determining, by the computer, that the first social media
content item was created at an altitude that is the same as the altitude of the device.
<br/><br/> 10. The method of claim 8, wherein the one or more sensor measurements relate to an ambient condition of a device that includes the one or more sensors, and wherein the method further comprises: determining, by the computer, that the first
social media content item was created at an ambient condition that is the same as the ambient condition of the device.
<br/><br/> 11. The method of claim 8, wherein the one or more sensor measurements relate to an altitude of a device that includes the one or more sensors and an ambient condition related to the device, and wherein the method further comprises:
determining, by the computer, that the first social media content item was created at an altitude that is the same as the altitude of the device; and determining, by the computer, that the first social media content item was created at an ambient
condition that is the same as the ambient condition of the device.
<br/><br/> 12. The method of claim 8, wherein the one or more processors are further programmed to: receive an indication that the first device has been moved to be in the second orientation; and cause the first graphical representation of the first
social media content item to be provided into the augmented view responsive to the indication that the device has been moved to be in the second orientation. <hr/> <center><b><i>Description</i></b></center> <hr/> <br/><br/>FIELD OF THE INVENTION
<br/><br/> The disclosure relates to systems and methods for augmenting a view of a physical space at a geographically definable location with social media and/or other content originating from the geographically definable location.
<br/><br/>BACKGROUND OF THE INVENTION
<br/><br/> The availability of content such as videos, audio files, photos, text, and/or other content over networks such as the Internet has grown at impressive rates. Many Internet and other online service providers make this type of content available
to enable users to post and share such content through their services. However, various limitations exist with respect to how this vast amount of information can be effectively monitored and/or selectively displayed.
<br/><br/> Because of the vast amount of information and different ways in which to communicate with users, it can be difficult to communicate with creators of the content.
<br/><br/>SUMMARY OF THE INVENTION
<br/><br/> The disclosure relates to systems and methods for augmenting a view of a physical space of one or more geographically definable locations ("geo-locations") with social media and/or other content originating from the one or more geo-locations.
Generally speaking, the system may include a computing device having one or more processors programmed to augment (e.g., add to, overlay, embed, etc.) the view of the physical space of a geo-location with social media content, thereby allowing a user to
view a physical space at a geo-location along with social media content that was created from the geo-location.
<br/><br/> The one or more processors may be programmed by one or more computer program modules. For example, the one or more processors may be configured to execute a geofeed creation module, a content context module, a reality context module, an
augmented reality module, an administration module, a communication module, a user interface module, and/or other modules. The geofeed creation module may be configured to obtain the social media content from one or more social media content providers.
<br/><br/> In some implementations, the content context module may be configured to obtain (e.g., receive, retrieve, or determine) contextual information that describes the context in which the social media content was created. The contextual information
for the content may include a geo-location, an ambient condition (e.g., temperature), an altitude, a motion or orientation based on sensor measurements from a device used to create the content, and/or other information that describes the context in which
the social media content was created. Content context module may be configured to obtain the contextual information from the content itself, such as when the contextual information is available as Exchangeable Image File ("EXIF") data embedded in
images, from the social media content provider, and/or from other sources (e.g., from a user who created the content).
<br/><br/> The computer may be configured to determine social media content that is to augment the view of the physical space based on one or more of the contextual information. The social media content may be filtered in and/or out using various geofeed
parameters (e.g., hashtags, identification of types of content, content providers, etc.) described herein. Thus, a user may indicate that certain content be included for and/or excluded from consideration for augmenting the view of the physical space.
<br/><br/> In some implementations, the reality context module may be configured to obtain contextual information that describes the context of a view of a physical space. The view of the physical space may include an image being displayed in real-time
through a camera lens (e.g., through a display that displays a scene being captured by imaging sensors of a camera), an image that is stored and displayed (e.g., a photograph), and/or other views of a physical space. Contextual information that
describes the context of a view of a physical space may include information similar to contextual information that describes social media content. For example, the contextual information that describes the context of the view of the physical space may
include a geo-location of the physical space (e.g., a current location for real-time implementations and a location at which the view was taken for stored implementations) and/or other contextual information.
<br/><br/> Reality context module may be configured to obtain the reality contextual information from real-time measurements/information (e.g., location information from location sensors, temperature from temperature sensors, etc.). In some
implementations, the reality context module may obtain the location based on image recognition of image features such as buildings, structures, and/or other identifiable objects taken from the view of the physical space.
<br/><br/> In some implementations, the augmented reality module may compare one or more of the content contextual information from content context module and one or more of the reality contextual information from reality context module. The augmented
reality module may determine a match (which may be exact or inexact) between the content contextual information and the reality contextual information from reality context module.
<br/><br/> Upon determining a match, the augmented reality module may augment the view of the physical space of the geo-location. For example, a location at which the social media content was created may be compared to a geo-location of the physical space
being viewed. The augmented reality module may determine that the social media content was created from the geo-location of the physical space being viewed and augment the view of the physical space with the social media content. Other contextual
information may be used instead of or in addition to the location information to determine whether social media content should be used to augment the view of the physical space.
<br/><br/> Some or all of the processing related to the content context module, the reality context module, and the augmented reality module may be performed at a device used to display the view of the physical space and/or at another device.
<br/><br/> By way of example only, in operation, a user may look at a display of the user's mobile device that displays a view of a building and its surroundings at a geo-location, where the view is imaged with a camera of the user's mobile device (the
view may be a picture/video or a live shot of the building and its surroundings). At the mobile device and/or at a remote device, reality contextual information that describes the geo-location may be obtained and social media content may be identified
based on a comparison between content contextual information and the reality contextual information. For example, the mobile device and/or the remote device may identify social media content that was created from the geo-location of the building and its
surroundings being viewed. The mobile device may then augment the view of the buildings and its surroundings with the identified social media content, thereby enhancing the user's view of the physical space.
<br/><br/> These and other objects, features, and characteristics of the system and/or method disclosed herein, as well as the methods of operation and functions of the related elements of structure and the combination of parts and economies of
manufacture, will become more apparent upon consideration of the following description and the appended claims with reference to the accompanying drawings, all of which form a part of this specification, wherein like reference numerals designate
corresponding parts in the various figures. It is to be expressly understood, however, that the drawings are for the purpose of illustration and description only and are not intended as a definition of the limits of the invention. As used in the
specification and in the claims, the singular form of "a", "an", and "the" include plural referents unless the context clearly dictates otherwise. <br/><br/>BRIEF DESCRIPTION OF THE DRAWINGS
<br/><br/> FIG. 1 illustrates a system configured to augment a view of a physical space of one or more geo-locations with social media and other content originating from the one or more geo-locations, according to an aspect of the invention.
<br/><br/> FIG. 2A illustrates an example of a user interface for displaying an augmented view of a physical space of one or more geo-locations with social media and other content originating from the one or more geo-locations, according to an aspect of
the invention.
<br/><br/> FIG. 2B illustrates an example of a user interface for displaying an augmented view of a physical space of one or more geo-locations with social media and other content originating from the one or more geo-locations, according to an aspect of
the invention.
<br/><br/> FIG. 2C illustrates an example of a user interface for displaying an augmented view of a physical space of one or more geo-locations with social media and other content originating from the one or more geo-locations, according to an aspect of
the invention.
<br/><br/> FIG. 3 illustrates a process for augmenting a view of a physical space of one or more geo-locations with social media and other content originating from the one or more geo-locations, according to an aspect of the invention.
<br/><br/> FIG. 4 illustrates a process of comparing the context of content with the context of a physical space to be augmented with the content, according to an aspect of the invention.
<br/><br/>DETAILED DESCRIPTION OF THE INVENTION
<br/><br/> FIG. 1 illustrates a system 100 configured to augment a view of a physical space of one or more geo-locations with social media and other content originating from the one or more geo-locations, according to an aspect of the invention. A geofeed includes a collection of content (also referred to herein as "geofeed
content"), aggregated from various content providers, that is relevant to a geographically definable location (hereinafter, a "geo-location"). The content providers may include, for example, social media providers (e.g., <b><i>FACEBOOK,</i></b> TWITTER, INSTAGRAM,
FLICKR, etc.), online knowledge databases, and/or other providers that can distribute content that may be relevant to a geo-location. The social media providers may include social media platforms that allow a user who is registered with the social media
platform to upload content and share the content with others who have signed up to be associated with the user.
<br/><br/> The geo-location may be specified by a boundary, geo coordinates (e.g., latitude, longitude, altitude/depth), an address, a school, a place name, a point of interest, a zip code, a city, a state, a country, and/or other information that can
spatially identify an area. The content may be generated by content sources such as individuals, corporations, and/or other entities that may create content. As used hereinafter, "a location," "a geo-location," "a geographically definable location,"
and similar language is not limited to a single location but may also refer to one or more such locations.
<br/><br/> In many instances the content can be automatically tagged with information. The information may include a user identification, date/time information related to the content (e.g., a date and/or time that indicates when the content was created,
uploaded, etc.), geographic information that specifies a location where the content was created, uploaded, etc., and/or other information. For example, cameras equipped with a Global Positioning Satellite ("GPS") unit and/or other location-aware system
may embed into an image file latitude/longitude coordinates that indicate where a picture was taken. In addition, modern hand-held devices such as smartphones may be equipped with a GPS sensor, which allows users to generate content with their devices
and share the content through a plurality of social networks and other providers. Moreover, some devices allow users to manually input the foregoing and other information for embedding into the content. Furthermore, editing software may allow a user to
embed or otherwise manually and/or automatically include information along with the content after the content was created.
<br/><br/> System 100 may include a computer 110, a geofeed API 111, a content consumer device 130, provider APIs 140, content providers 150, and/or other components. In some embodiments, computer 110 may include one or more processors 120 configured to
perform some or all of a functionality of a plurality of modules, which may be stored in a memory 121. For example, the one or more processors 120 may be configured to execute a geofeed creation module 112, a content context module 113, a reality
context module 114, an augmented reality module 115, an administration module 116, a communication module 117, a user interface module 118, and/or other modules 119. Geofeed API 111 may be used to interface with computer 110 in relation to the geofeeds.
<br/><br/> Geofeed creation module 112 may be configured to create one or more geofeeds, as described in U.S. patent application Ser. No. 13/284,455 (issued on Feb. 18, 2014 as U.S. Pat. No. 8,655,873), filed Oct. 28, 2011, entitled "SYSTEM AND
METHOD FOR AGGREGATING AND DISTRIBUTING GEOTAGGED CONTENT," and U.S. patent application Ser. No. 13/619,888 (issued on Nov. 26, 2013 as U.S. Pat. No. 8,595,317), filed Sep. 14, 2012, entitled "SYSTEM AND METHOD FOR GENERATING, ACCESSING, AND
UPDATING GEOFEEDS" both of which are incorporated by reference herein in their entireties.
<br/><br/> U.S. patent application Ser. No. 13/708,516 (issued on Feb. 18, 2014 as U.S. Pat. No. 8,655,983), filed Dec. 7, 2012, entitled "SYSTEM AND METHOD FOR LOCATION MONITORING BASED ON ORGANIZED GEOFEEDS," U.S. patent application Ser. No.
13/708,466 (issued on Jan. 28, 2014 as U.S. Pat. No. 8,639,767), filed Dec. 7, 2012, entitled "SYSTEM AND METHOD FOR GENERATING AND MANAGING GEOFEED-BASED ALERTS," U.S. patent application Ser. No. 13/708,404 (issued on Jul. 9, 2013 as U.S. Pat.
No. 8,484,224), filed Dec. 7, 2012, entitled "SYSTEM AND METHOD FOR RANKING GEOFEEDS AND CONTENT WITHIN GEOFEEDS," co-pending U.S. patent application Ser. No. 13/788,843, filed Mar. 7, 2013, entitled "SYSTEM AND METHOD FOR DIFFERENTIALLY PROCESSING A
LOCATION INPUT FOR CONTENT PROVIDERS THAT USE DIFFERENT LOCATION INPUT FORMATS," U.S. patent application Ser. No. 13/788,760 (issued on Dec. 17, 2013 as U.S. Pat. No. 8,612,533), filed Mar. 7, 2013, entitled "SYSTEM AND METHOD FOR CREATING AND
MANAGING GEOFEEDS," and U.S. patent application Ser. No. 13/788,909 (issued on Sep. 30, 2014 as U.S. Pat. No. 8,850,531), filed Mar. 7, 2013, entitled "SYSTEM AND METHOD FOR TARGETED MESSAGING, WORKFLOW MANAGEMENT, AND DIGITAL RIGHTS MANAGEMENT FOR
GEOFEEDS," are all incorporated by reference in their entireties herein.
<br/><br/> U.S. patent application Ser. No. 13/843,949 filed on Mar. 15, 2013 (issued on Oct. 14, 2014 as U.S. Pat. No. 8,862,589) entitled "SYSTEM AND METHOD FOR PREDICTING A GEOGRAPHIC ORIGIN OF CONTENT AND ACCURACY OF GEOTAGS RELATED TO CONTENT
OBTAINED FROM SOCIAL MEDIA AND OTHER CONTENT PROVIDERS," and U.S. patent application Ser. No. 13/843,832 filed on Mar. 15, 2013 (issued on Sep. 30, 2014 as U.S. Pat. No. 8,849,935) entitled "SYSTEM AND METHOD FOR GENERATING THREE-DIMENSIONAL
GEOFEEDS, ORIENTATION-BASED GEOFEEDS, AND GEOFEEDS BASED ON AMBIENT CONDITIONS," are all incorporated by reference in their entireties herein.
<br/><br/> Geofeed creation module 112 may be configured to generate one or more geofeeds based on content that is relevant to one or more geographically definable locations ("geo-locations"). The geofeed creation module may format requests that specify
one or more geo-locations specifically for individual ones of the plurality of content providers and aggregate the content to form a geofeed. In some embodiments, geofeed creation module 112 may create a single geofeed having a plurality of
geo-locations that are grouped with respect to one another. In other embodiments, geofeed creation module 112 may create multiple distinct geofeeds, which may each be associated with one or more geo-locations and may be grouped with respect to one
another. In these embodiments, each set of individual content may correspond to a single geofeed.
<br/><br/> For example, geofeed creation module 112 may format requests to individual ones of a plurality of APIs 140 (illustrated in FIG. 1 as API 140A, 140B, . . . , 140N). The provider APIs 140 may facilitate receiving content from corresponding
content providers 150 (illustrated in FIG. 1 as content providers 150A, 150B, 150C). In some embodiments, geofeed creation module 112 may format a request directly for content provider 150N without using a corresponding API. A content consumer device
130 may request and view geofeeds created by geofeed creation module 112. The formatting instructions may be stored in and recalled from a memory such as provider database 134. Preferences for creating, viewing, and/or managing information related to
geofeeds may be stored in and recalled from a memory such as user profile database 132.
<br/><br/> In some embodiments, geofeed creation module 112 may generate a geofeed definition that describes a geofeed such that a geofeed may be dynamically generated based on the geofeed definition. For example, the geofeed definition may include the
geo-location specification, one or more geofeed parameters used to filter content aggregated from content providers 150, and/or other information related to the geofeed that can be used to aggregate content from various content providers. For example,
the one or more geofeed parameters may be used to view only particular types of content, content from particular content providers, and/or other parameters by which to filter in or out content. The geofeed definition may be identified by a geofeed identifier
and stored (e.g., in database 136) for later retrieval so that a content consumer or others may select and obtain a geofeed that was previously defined.
<br/><br/> In some embodiments, geofeed creation module 112 may store the geofeed (e.g., in database 136). For example, geofeed creation module 112 may be configured to store the geofeed by aggregating content from content providers 150 in relation to the
geofeed and store the content in association with a geofeed identifier and/or a geofeed definition.
<br/><br/> In some embodiments, geofeed creation module 112 may use the credentials of a user for social media or other platform to access content. In this manner, geofeed creation module 112 may obtain content from a content provider using the
credentials of the user. For example, geofeed creation module 112 may obtain from the user a username and password (with permission from the user) for the user's TWITTER account and obtain content from TWITTER to which the user has access.
<br/><br/> In some implementations, content context module 113 may be configured to obtain (e.g., receive, retrieve, or determine) contextual information that describes the context in which the social media content was created. The contextual information
for the content may include a geo-location, an ambient condition (e.g., temperature), an altitude, a motion or orientation based on sensor measurements from a device used to create the content, and/or other information that describes the context in which
the social media content was created. Content context module may be configured to obtain the contextual information from the content itself, such as when the contextual information is available as Exchangeable Image File ("EXIF") data embedded in
images, from the social media content provider, and/or from other sources (e.g., from a user who created the content).
<br/><br/> The computer may be configured to determine social media content that is to augment the view of the physical space based on one or more of the contextual information. The social media content may be filtered in and/or out using various geofeed
parameters (e.g., hashtags, identification of types of content, content providers, etc.) described herein. Thus, a user may indicate that certain content be included for and/or excluded from consideration for augmenting the view of the physical space.
<br/><br/> In some implementations, reality context module 114 may be configured to obtain contextual information that describes the context of a view of a physical space. The view of the physical space may include an image being displayed in real-time
through a camera lens (e.g., through a display that displays a scene being captured by imaging sensors of a camera), an image that is stored and displayed (e.g., a photograph), and/or other views of a physical space. Contextual information that
describes the context of a view of a physical space may include information similar to contextual information that describes social media content. For example, the contextual information that describes the context of the view of the physical space may
include a geo-location of the physical space (e.g., a current location for real-time implementations and a location at which the view was taken for stored implementations) and/or other contextual information.
<br/><br/> Reality context module 114 may be configured to obtain the reality contextual information from real-time measurements/information (e.g., location information from location sensors, temperature from temperature sensors, etc.). In some
implementations, reality context module 114 may obtain the location based on image recognition of image features such as buildings, structures, and/or other identifiable objects taken from the view of the physical space.
<br/><br/> In some implementations, augmented reality module 115 may be configured to compare one or more of the content contextual information from content context module 113 and one or more of the reality contextual information from reality context module 114.
Augmented reality module 115 may determine a match (which may be exact or inexact) between the content contextual information and the reality contextual information from reality context module 114.
<br/><br/> Upon determining a match, augmented reality module 115 may augment the view of the physical space of the geo-location. For example, a location at which the social media content was created may be compared to a geo-location of the physical space
being viewed. Augmented reality module 115 may determine that the social media content was created from the geo-location of the physical space being viewed and augment the view of the physical space with the social media content. Other contextual
information may be used instead of or in addition to the location information to determine whether social media content should be used to augment the view of the physical space.
<br/><br/> In some implementations, for example, augmented reality module 115 may be configured to compare other contextual information and/or combinations of contextual information such as, for example, ambient conditions, orientations, motion (e.g.,
motion of a device such as a camera device used to create the content), altitude, and/or other contextual information.
<br/><br/> Some or all of the processing related to content context module 113, reality context module 114, and augmented reality module 115 may be performed at a device used to display the view of the physical space and/or at another device.
<br/><br/> In some embodiments, administration module 116 may be configured to manage user accounts, set user roles such as security access roles, and/or perform other administrative operations. For example, the administration module may be used to define
which users may generate messages using the unified message module, generate workflow items, view workflow items of others, annotate content, enter into agreements with respect to ownership rights of the content, and/or set other user roles.
<br/><br/> In some embodiments, communication module 117 may be configured to share a geofeed via a content provider such as a social media provider, email, SMS text, and/or other communication channels. In some embodiments, the communication module may
be configured to communicate a geofeed via various feeds such as Really Simple Syndication ("RSS") and ATOM feeds, a vanity Uniform Resource Locator ("URL") using a name of the geofeed (e.g., a name assigned by the content consumer), and/or other
communication channels.
<br/><br/> In some embodiments, the user interface module 118 may be configured to generate user interfaces that allow viewing and interaction with augmented views of physical spaces. Examples of such user interfaces are illustrated in FIGS. 2A-C. User
interface module 118 may also present various displays for sending/receiving messages, managing workflows, managing information related to ownership rights of the content, displaying geofeeds, requesting geofeeds, and/or otherwise displaying information
related to the content.
<br/><br/> Those having skill in the art will recognize that computer 110 and content consumer device 130 may each comprise one or more processors, one or more interfaces (to various peripheral devices or components), memory, one or more storage devices,
and/or other components coupled via a bus. The memory may comprise random access memory (RAM), read only memory (ROM), or other memory. The memory may store computer-executable instructions to be executed by the processor as well as data that may be
manipulated by the processor. The storage devices may comprise floppy disks, hard disks, optical disks, tapes, or other storage devices for storing computer-executable instructions and/or data.
<br/><br/> One or more applications, including various modules, may be loaded into memory and run on an operating system of computer 110 and/or consumer device 130. In one implementation, computer 110 and consumer device 130 may each comprise a server
device, a desktop computer, a laptop, a cell phone, a smart phone, a Personal Digital Assistant, a pocket PC, or other device.
<br/><br/> Network 102 may include any one or more of, for instance, the Internet, an intranet, a PAN (Personal Area Network), a LAN (Local Area Network), a WAN (Wide Area Network), a SAN (Storage Area Network), a MAN (Metropolitan Area Network), a
wireless network, a cellular communications network, a Public Switched Telephone Network, and/or other network.
<br/><br/> Various inputs, outputs, configurations, and/or other information described herein as being stored or storable may be stored in one or more databases (not illustrated in FIG. 1). Such databases may be, include, or interface to, for example, an
Oracle.TM. relational database sold commercially by Oracle Corporation. Other databases, such as Informix.TM., DB2 (Database 2) or other data storage, including file-based, or query formats, platforms, or resources such as OLAP (On Line Analytical
Processing), SQL (Standard Query Language), a SAN (storage area network), Microsoft Access.TM. or others may also be used, incorporated, or accessed. The database may comprise one or more such databases that reside in one or more physical devices and
in one or more physical locations. The database may store a plurality of types of data and/or files and associated data or file descriptions, administrative information, or any other data.
<br/><br/> FIG. 2A illustrates an example of a user interface 200A for displaying an augmented view of a physical space of one or more geo-locations with social media and/or other content originating from the one or more geo-locations, according to an
aspect of the invention. The user interfaces illustrated in FIGS. 2A-2C are for illustrative purposes only. Various components may be added, deleted, moved, or otherwise changed so that the configuration, appearance, and/or content of the screenshots
may be different than as illustrated in the Figures. Accordingly, the graphical user interface objects as illustrated (and described in greater detail below) are exemplary by nature and, as such, should not be viewed as limiting.
<br/><br/> User interface 200A may display a view of a physical space 210 augmented by an augmented reality ("AR") space 220. AR space 220 may be overlaid onto, embedded within, or otherwise displayed alongside physical space 210 (e.g., a real-world
space) such that graphical objects displayed on AR space 220 coincide with locations on physical space 210. In this manner, graphical objects on AR space 220 may appear to be associated with physical objects (e.g., real-world objects) in physical space
210. Physical space 210 and AR space 220 are illustrated as being separate solely for convenience of illustration.
<br/><br/> A physical object 212 and its surroundings may be presented in a view of the physical space. Reality context module 114 (illustrated in FIG. 1) may obtain reality contextual information related to the view of physical object 212 and/or the
geo-location where the physical object is located.
<br/><br/> For example, reality context module 114 may determine that physical object 212 is located at a particular geo-location. As described herein, the particular geo-location may be determined based on conventional location techniques associated with
a device that is displaying user interface 200A. For example, the device may include GPS sensors and/or other devices that can be used for localization. In some implementations, physical object 212 and/or other feature of physical space 210 may be used
to determine the particular geo-location such as by image recognition and comparison to a database of known objects, for example.
<br/><br/> Whichever location technique is used, augmented reality module 115 may identify social media and/or other content that was created from the particular geo-location. Users may have posted social media content to one or more social media
providers from the particular geo-location. Augmented reality module 115 may identify or otherwise obtain such social media content and provide user interface 200A with the content. For example, user interface module 118 may include AR objects 222
(illustrated in FIG. 2A as AR objects 222A, 222B, . . . , 222N) corresponding to the social media content that was created from the particular geo-location. An AR object 222 when clicked or otherwise selected may cause user interface 200A to provide
additional or detailed information related to the corresponding social media content. Space permitting, the social media content may be displayed within AR object 222. If too many AR objects are included, they may be merged into an AR object 222.
<br/><br/> In some implementations, if information indicates that a particular social media content item was created from or nearby physical object 212, a corresponding AR object 222 may be positioned on AR space 220 to correspond to physical object 212,
thereby providing the user with an indication of this.
<br/><br/> FIG. 2B illustrates an example of a user interface 200B for displaying an augmented view of a physical space of one or more geo-locations with social media and/or other content originating from the one or more geo-locations, according to an
aspect of the invention.
<br/><br/> User interface 200B may provide a view of physical space 210 augmented with AR space 220. Physical object 212 may be visible in the augmented view. In the illustrated implementation, user interface 200B may include an indicator 230 that
indicates that social media content was created at the direction indicated. For example, reality context module 114 may determine an orientation of the device being used to display user interface 200B. Such orientation may be determined based on sensor
information from gyroscopes, accelerometers, magnetometers, and/or other sensors.
<br/><br/> Augmented reality module 115 may determine content created from a geo-location of the physical space and, for example, an orientation at which the content was created. Augmented reality module 115 may determine that social media content is
nearby a user's location but was created in an orientation that is different from the orientation of the device that is displaying user interface 200B.
<br/><br/> Indicator 230 may indicate the direction of where social media content was created. For example, indicator 230 may indicate that social media content was posted while the user who posted the content was in an orientation that is in a direction
as indicated by the indicator. In other words, if the device that displays user interface 200B turns in the direction indicated by indicator 230, the social media content will be made visible in AR space 220. In this manner, a user may explore a given
scenery to determine what previous users may have posted about the given scenery observed from the same perspective (e.g., orientation). For example, hobbyists such as stargazers may gain insight into what previous stargazers may have been observing
from a particular vantage point and orientation (e.g., zenith, azimuth, etc.) toward the sky. Tourists may view what others have posted about a particular scenic view or attraction.
<br/><br/> FIG. 2C illustrates an example of a user interface 200C for displaying an augmented view of a physical space of one or more geo-locations with social media and other content originating from the one or more geo-locations, according to an aspect
of the invention. User interface 200C may provide a view of physical space 210 augmented with AR space 220. Physical object 216 may be visible in the augmented view. In the illustrated implementation, physical object 216 may be a building having
multiple floors, which are individually associated with different altitudes. Reality context module 114 may recognize physical object 216 based on conventional image recognition techniques and estimate a location for each floor by, for example,
recognizing windows or other floor-level indicating features. Reality context module 114 may then assign altitudes for each floor based on a predefined estimated distance per floor (e.g., 12 feet per floor).
<br/><br/> Augmented reality module 115 may determine content created from physical object 216 and, for example, at which altitude. Augmented reality module 115 may correlate the altitude at which the social media and/or other content was created with the
altitude of the physical object 216. Based on the correlation, augmented reality module 115 may cause AR objects 224 corresponding to the social media and/or other content to be displayed at their respective altitudes on physical object 216. For
example, hotel or commercial building owners may post social media content (which may include marketing or other branded materials) from their respective buildings at different floors. User interface 200C may be used to then view the building augmented
with the social media posts such that a passerby or other interested users may image the building and obtain an augmented image. Other uses are contemplated as well. For example, a user may enter the building and travel to various floors and receive an
augmented view of each floor based on social media content that was posted from that particular building and that particular floor. Furthermore, although illustrated as a building, physical object 216 may include other types of structures for which
different altitudes may be estimated and/or traversed for augmented views of the physical object.
<br/><br/> FIG. 3 illustrates a process 300 for augmenting a view of a physical space of geo-locations with social media and/or other content originating from the one or more geo-locations, according to an aspect of the invention. The various processing
operations and/or data flows depicted in FIG. 3 (and in the other drawing figures) are described in greater detail herein. The described operations may be accomplished using some or all of the system components described in detail above and, in some
embodiments, various operations may be performed in different sequences and various operations may be omitted. Additional operations may be performed along with some or all of the operations shown in the depicted flow diagrams. One or more operations
may be performed simultaneously. Accordingly, the operations as illustrated (and described in greater detail below) are exemplary by nature and, as such, should not be viewed as limiting.
<br/><br/> In an operation 302, a view of the particular scenery may be obtained (e.g., photographed, videographed, imaged live, etc.). In an operation 304, the imaged scenery may be processed. For example, one or more features in the scenery may be
processed using conventional image processing techniques to recognize a location. A location may be recognized based on the processing in an operation 306. For example, a landmark such as the Empire State Building may be recognized and a location for the
landmark may be obtained.
<br/><br/> In an operation 308, content created from the location may be obtained. In an operation 310, the content may be used to augment the view of the particular scenery with graphical elements that represent the content created from the location.
<br/><br/> FIG. 4 illustrates a process 400 of comparing contextual information of content with contextual information of a physical space to be augmented with the content, according to an aspect of the invention.
<br/><br/> In an operation 402, a view of a physical space may be obtained (e.g., photographed, videographed, imaged live, etc.). In an operation 404, contextual information related to the physical space may be obtained. The reality contextual
information may include a geo-location, an ambient condition, an altitude, and/or other reality contextual information. For example, if the physical space has been imaged and stored as a photograph, the contextual information may be
obtained from EXIF data or other data source that describes the image and/or the physical space. On the other hand, if the physical space is being currently imaged (e.g., live), then the reality contextual information may be obtained from one or more
sensors on-board the device used to image the physical space, other sensors, inputs by an operator of the device, and/or other source of reality contextual information.
<br/><br/> In an operation 406, contextual information that describes the content (e.g., social media content) may be obtained. In an operation 408, a determination of whether contextual information of the content matches the reality contextual
information may be made. Such matching may be exact or inexact (e.g., within a predefined and/or configurable threshold) and may include matching location, orientation, ambient conditions, altitude, and/or other contextual information that can be automatically
measured or determined. In some implementations, such matching may include matching information provided by users.
<br/><br/> If a match is found, the view of the physical space may be augmented with graphical objects representative of the content whose contextual information matches the reality contextual information in an operation 410. Processing may then proceed
to an operation 412, where a determination of whether more content is available for processing is made. If more content is available, processing may return to operation 406.
<br/><br/> Other embodiments, uses and advantages of the invention will be apparent to those skilled in the art from consideration of the specification and practice of the invention disclosed herein. The specification should be considered exemplary only,
and the scope of the invention is accordingly intended to be limited only by the following claims.
<br/><br/><center><b>* * * * *</b></center>
<hr/>
<center>
<a href="http://pdfpiw.uspto.gov/.piw?Docid=09317600&homeurl=http%3A%2F%2Fpatft.uspto.gov%2Fnetacgi%2Fnph-Parser%3FSect1%3DPTO2%2526Sect2%3DHITOFF%2526u%3D%25252Fnetahtml%25252FPTO%25252Fsearch-adv.htm%2526r%3D780%2526f%3DG%2526l%3D50%2526d%3DPTXT%2526s1%3Dfacebook%2526p%3D16%2526OS%3Dfacebook%2526RS%3Dfacebook&PageNum=&Rtype=&SectionNum=&idkey=NONE&Input=View+first+page"><img alt="[Image]" border="0" src="/netaicon/PTO/image.gif" valign="middle"/></a>
<table>
<tbody><tr><td align="center"><a href="http://ebiz1.uspto.gov/vision-service/ShoppingCart_P/ShowShoppingCart?backUrl1=http%3A//patft.uspto.gov/netacgi/nph-Parser?Sect1%3DPTO2%26Sect2%3DHITOFF%26u%3D%25252Fnetahtml%25252FPTO%25252Fsearch-adv.htm%26r%3D780%26f%3DG%26l%3D50%26d%3DPTXT%26s1%3Dfacebook%26p%3D16%26OS%3Dfacebook&backLabel1=Back%20to%20Document%3A%209317600"><img alt="[View Shopping Cart]" border="0" src="/netaicon/PTO/cart.gif" valign="middle"/></a>
<a href="http://ebiz1.uspto.gov/vision-service/ShoppingCart_P/AddToShoppingCart?docNumber=9317600&backUrl1=http%3A//patft.uspto.gov/netacgi/nph-Parser?Sect1%3DPTO2%26Sect2%3DHITOFF%26u%3D%25252Fnetahtml%25252FPTO%25252Fsearch-adv.htm%26r%3D780%26f%3DG%26l%3D50%26d%3DPTXT%26s1%3Dfacebook%26p%3D16%26OS%3Dfacebook&backLabel1=Back%20to%20Document%3A%209317600">
<img alt="[Add to Shopping Cart]" border="0" src="/netaicon/PTO/order.gif" valign="middle"/></a>
</td></tr>
<tr><td align="center">
<a href="/netacgi/nph-Parser?Sect1=PTO2&Sect2=HITOFF&u=%2Fnetahtml%2FPTO%2Fsearch-adv.htm&r=780&f=S&l=50&d=PTXT&s1=facebook&p=15&Query=facebook"><img alt="[PREV_LIST]" border="0" src="/netaicon/PTO/prevlist.gif" valign="MIDDLE"/></a>
<a href="/netacgi/nph-Parser?Sect1=PTO2&Sect2=HITOFF&u=%2Fnetahtml%2FPTO%2Fsearch-adv.htm&r=780&f=S&l=50&d=PTXT&s1=facebook&p=16&Query=facebook"><img alt="[HIT_LIST]" border="0" src="/netaicon/PTO/hitlist.gif" valign="MIDDLE"/></a>
<a href="/netacgi/nph-Parser?Sect1=PTO2&Sect2=HITOFF&u=%2Fnetahtml%2FPTO%2Fsearch-adv.htm&r=780&f=S&l=50&d=PTXT&s1=facebook&p=17&Query=facebook"><img alt="[NEXT_LIST]" border="0" src="/netaicon/PTO/nextlist.gif" valign="MIDDLE"/></a>
<a href="/netacgi/nph-Parser?Sect1=PTO2&Sect2=HITOFF&u=%2Fnetahtml%2FPTO%2Fsearch-adv.htm&r=779&f=G&l=50&d=PTXT&s1=facebook&p=16&OS=facebook"><img alt="[PREV_DOC]" border="0" src="/netaicon/PTO/prevdoc.gif" valign="MIDDLE"/></a>
<a href="/netacgi/nph-Parser?Sect1=PTO2&Sect2=HITOFF&u=%2Fnetahtml%2FPTO%2Fsearch-adv.htm&r=781&f=G&l=50&d=PTXT&s1=facebook&p=16&OS=facebook"><img alt="[NEXT_DOC]" border="0" src="/netaicon/PTO/nextdoc.gif" valign="MIDDLE"/></a>
<a href="#top"><img alt="[Top]" border="0" src="/netaicon/PTO/top.gif" valign="middle"/></a>
</td></tr>
</tbody></table>
<a name="bottom"></a>
<a href="/netahtml/PTO/index.html"><img alt="[Home]" border="0" src="/netaicon/PTO/home.gif" valign="middle"/></a>
<a href="/netahtml/PTO/search-bool.html"><img alt="[Boolean Search]" border="0" src="/netaicon/PTO/boolean.gif" valign="middle"/></a>
<a href="/netahtml/PTO/search-adv.htm"><img alt="[Manual Search]" border="0" src="/netaicon/PTO/manual.gif" valign="middle"/></a>
<a href="/netahtml/PTO/srchnum.htm"><img alt="[Number Search]" border="0" src="/netaicon/PTO/number.gif" valign="middle"/></a>
<a href="/netahtml/PTO/help/help.htm"><img alt="[Help]" border="0" src="/netaicon/PTO/help.gif" valign="middle"/></a>
</center>
</body></html>