clane9/boldgpt_small_patch10.kmq
Updated
subject_id int64 0 1 | trial_id int64 0 27.7k | session_id int64 0 36 | nsd_id int64 5 73k | image imagewidth (px) 256 256 | activity imagewidth (px) 200 200 | subject stringclasses 2
values | flagged bool 2
classes | BOLD5000 bool 2
classes | shared1000 bool 2
classes | coco_split stringclasses 2
values | coco_id int64 30 582k | objects dict | captions sequencelengths 5 6 | repetitions dict |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0 | 0 | 0 | 46,002 | subj01 | false | true | true | train2017 | 412,922 | {
"area": [
5521,
2947,
3020,
41,
763,
322,
2742,
1018,
310,
511,
301,
760,
6409,
7397
],
"bbox": [
[
69.04,
236.63,
138.87,
88.61000000000001
],
[
357.31,
220.97,
67.69,
68.16
],
[
25... | [
"White cows eating grass under trees and the sky",
"Many cows in a pasture with trees eating grass.",
"A herd of cows graze on a field of sparse grass.",
"a herd of white cows grazing on brush among the trees",
"A herd of mostly white cows in a field with some trees."
] | {
"subject1_rep0": 1,
"subject1_rep1": 12803,
"subject1_rep2": 25837,
"subject2_rep0": 1,
"subject2_rep1": 12803,
"subject2_rep2": 25837,
"subject3_rep0": 1,
"subject3_rep1": 12803,
"subject3_rep2": 25837,
"subject4_rep0": 1,
"subject4_rep1": 12803,
"subject4_rep2": 25837,
"subject5_rep0": 1,
... | ||
0 | 1 | 0 | 61,882 | subj01 | false | false | false | train2017 | 474,858 | {
"area": [
9550,
116,
32,
8409,
3234,
16,
96,
28,
23,
167,
960,
4660,
10,
6,
21,
8,
7
],
"bbox": [
[
229.22,
159.33,
74.50000000000003,
233.04
],
[
2.87,
263.44,
7.89,
24.12000000000000... | [
"A man standing near the ocean with surf boards",
"A man standing on the beach beside surf boards",
"People on a beach preparing to go into the water.",
"a man is flying a kite over a beach",
"People standing on the sand near surfboards and the water."
] | {
"subject1_rep0": 2,
"subject1_rep1": 9089,
"subject1_rep2": 13015,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
"... | ||
0 | 2 | 0 | 828 | subj01 | false | false | false | val2017 | 320,696 | {
"area": [
8098,
2585
],
"bbox": [
[
129.96,
158.16,
149.67,
141.53
],
[
162.77,
272.87,
94.08000000000001,
44.73000000000002
]
],
"category": [
"person",
"surfboard"
],
"iscrowd": [
0,
0
],
"segmentation": [
... | [
"A man on a surfboard, who is riding a wave.",
"A man riding a wave on top of a surfboard in the ocean.",
"a young man on a surfboard, surfing a wave",
"A guy in a wet suite riding a surfboard on a wave.",
"Surfer riding out end of wave with large breaking wave in background."
] | {
"subject1_rep0": 3,
"subject1_rep1": 10934,
"subject1_rep2": 11358,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
... | ||
0 | 3 | 0 | 67,573 | subj01 | false | false | false | train2017 | 234,676 | {
"area": [
1803,
211,
31079,
153,
1475,
357
],
"bbox": [
[
95.01,
58.87,
30.629999999999995,
108.16
],
[
408.15,
112.2,
16.850000000000023,
26.700000000000003
],
[
0,
214.1,
254.34,
210.9
],
... | [
"A group of people sitting and standing on top of a sandy beach.",
"A surfboard rests on the beach while people play in the waves",
"A surfboard on the sand and people on the beach behind.",
"A few people are hanging out and appreciating their time. \n",
"A group of people are sitting on the beach shore."
] | {
"subject1_rep0": 4,
"subject1_rep1": 2639,
"subject1_rep2": 3120,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
"s... | ||
0 | 4 | 0 | 16,020 | subj01 | false | false | false | train2017 | 301,595 | {
"area": [
15236,
406
],
"bbox": [
[
149.87,
230.85,
138.77999999999997,
194.15
],
[
389.57,
246.93,
35.43000000000001,
17.49000000000001
]
],
"category": [
"toilet",
"sink"
],
"iscrowd": [
0,
0
],
"segmentation":... | [
"A shower and a white toilet in a small bathroom.",
"A bathroom that has a glass shower door.",
"a bathroom with a toilet and a sink and a shower",
"a bathroom with a glass shower and white toilet",
"A bathroom with a toilet, shower stall, sink and a mirror."
] | {
"subject1_rep0": 5,
"subject1_rep1": 22969,
"subject1_rep2": 25527,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
... | ||
0 | 5 | 0 | 40,422 | subj01 | false | false | false | train2017 | 129,059 | {
"area": [
45320,
50722
],
"bbox": [
[
0,
176.38,
345.18,
198.72000000000003
],
[
64.38,
35,
360.62,
305.83
]
],
"category": [
"zebra",
"zebra"
],
"iscrowd": [
0,
0
],
"segmentation": [
{
"counts": null,... | [
"Two zebra standing next to each other on dirt ground.",
"Two zebras in a exhibit one standing and one laying down.",
"One zebra is laying down and another zebra is standing up.",
"two grown zebra inside some kind of enclosure",
"Pair of Zebras enjoying quiet time in zoo setting."
] | {
"subject1_rep0": 6,
"subject1_rep1": 12766,
"subject1_rep2": 29813,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
... | ||
0 | 6 | 0 | 51,517 | subj01 | false | false | false | train2017 | 172,751 | {
"area": [
59714,
25235,
8307
],
"bbox": [
[
0,
232.98,
425,
192.02
],
[
0,
162.8,
212.5,
262.2
],
[
179.9,
115.69,
128.17999999999998,
141.57
]
],
"category": [
"chair",
"chair",
"bird"
... | [
"A bird perched on the back of an orange chair.",
"a brown black and white bird sitting on an orange object",
"A bird sitting on a orange chair, staring off to the right.",
"A bird that is perched on top of an orange, circular surface.",
"A small bird on an orange chair back."
] | {
"subject1_rep0": 7,
"subject1_rep1": 29772,
"subject1_rep2": 29834,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
... | ||
0 | 7 | 0 | 62,325 | subj01 | false | false | false | train2017 | 481,427 | {
"area": [
1505
],
"bbox": [
[
391.58,
0.7000000000000001,
33.420000000000016,
71.53
]
],
"category": [
"kite"
],
"iscrowd": [
0
],
"segmentation": [
{
"counts": null,
"poly": [
[
424.56,
16.49,
423.16,
... | [
"A group of boats with large giant cranes on top of them.",
"A ship that is picking up containers to take across the ocean",
"Something is splashing in the water near the boats.",
"Four cranes have been set up in the ocean .",
"Four cranes at a dock next to the ocean."
] | {
"subject1_rep0": 8,
"subject1_rep1": 4422,
"subject1_rep2": 12340,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
"... | ||
0 | 8 | 0 | 50,610 | subj01 | false | false | false | train2017 | 169,097 | {
"area": [
3729,
854,
269,
472
],
"bbox": [
[
187.41,
23.58,
45.84,
103.15
],
[
310.45,
114.87,
20.689999999999998,
50.97999999999999
],
[
374.3,
163.29,
13.069999999999993,
27.590000000000003
],
[... | [
"The parking meters are posted beside a cement wall. ",
"A row of parking meters sitting along a cement fence.",
"A bunch of parking tolls near a concrete wall with numbers",
"Parking meters in pavement with corresponding numbered poles.",
"A row of numbered coin meters on a side walk."
] | {
"subject1_rep0": 9,
"subject1_rep1": 2051,
"subject1_rep2": 2219,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
"s... | ||
0 | 9 | 0 | 55,065 | subj01 | false | false | false | train2017 | 186,500 | {
"area": [
60816,
2629,
2195,
1016
],
"bbox": [
[
7.64,
137.76,
336.18,
270.29
],
[
333.11,
168.51,
66.78999999999996,
85.53
],
[
227.15,
166.61,
60.07000000000002,
60.059999999999974
],
[
28... | [
"The hotel room features a contemporary SIMPLEmotif.",
"a bedroom view of a neatly made bed sitting on a hardwood floor",
"a room that has a large bed in it",
"A bed with a pillow and lights on a corner",
"A neat bedroom pairs modern chairs with a glass table."
] | {
"subject1_rep0": 10,
"subject1_rep1": 24264,
"subject1_rep2": 24380,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
... | ||
0 | 10 | 0 | 37,398 | subj01 | false | false | false | train2017 | 379,399 | {
"area": [
64795,
12235
],
"bbox": [
[
234.31,
0,
190.69,
425
],
[
121.21,
0,
156.07,
237.11
]
],
"category": [
"bed",
"chair"
],
"iscrowd": [
0,
0
],
"segmentation": [
{
"counts": null,
"poly": [
... | [
"A chair in the corner of a messy room beside the bed",
"Chair and a bed with unfolded clothes on surfaces",
"A room with close on the floor, bed and chair.",
"A messy room with clothes scattered here and there.",
"A room with a large white bed and a wooden rocking chair."
] | {
"subject1_rep0": 11,
"subject1_rep1": 4901,
"subject1_rep2": 6335,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
"... | ||
0 | 11 | 0 | 18,039 | subj01 | false | false | false | train2017 | 27,706 | {
"area": [
17598,
29922,
100796
],
"bbox": [
[
272,
8.32,
137.42000000000002,
176.55
],
[
282.16,
188.15,
142.83999999999997,
229.21
],
[
0,
34.86,
300.9,
382.95
]
],
"category": [
"cat",
"lapt... | [
"A cat looks intently at a MacBook Pro being unboxed",
"A Siamese cat with a red collar and tag next to a Mac Book Pro.",
"A cat hiding behind a laptop and box",
"The cat is sitting behind the new computer.",
"The cat sits behind the laptop with a strange look on its face."
] | {
"subject1_rep0": 12,
"subject1_rep1": 535,
"subject1_rep2": 19042,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
"... | ||
0 | 12 | 0 | 67,533 | subj01 | false | false | false | train2017 | 234,527 | {
"area": [
3423,
337,
756,
1166,
744,
3033,
971,
393,
179,
193,
824,
135,
239,
17,
127,
132,
98,
103,
141,
145,
1121
],
"bbox": [
[
204.87,
242.9,
96.07999999999998,
107.70000000000002
],
[
... | [
"A batter swinging at a pitch during a baseball game.",
"A baseball player swinging at a pitch during a baseball game.",
"A man with a baseball bat stands at a base.",
"A group of men playing baseball on a field.",
"A baseball player is runnign bases in order to score."
] | {
"subject1_rep0": 13,
"subject1_rep1": 10872,
"subject1_rep2": 11070,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
... | ||
0 | 13 | 0 | 21,822 | subj01 | false | false | false | train2017 | 319,364 | {
"area": [
37298,
15363,
39875,
38351
],
"bbox": [
[
0,
0,
252.56,
152.38
],
[
277.07,
0.73,
147.93,
113.92
],
[
0,
146.87,
425,
122.48000000000002
],
[
0,
245.45,
425,
117.... | [
"A cat is laying in between the keyboard and monitor. ",
"A cat laying on top of a computer keyboard.",
"Large cat laying down under two computer monitors.",
"A cat laying down on a desk with its head under the monitor.",
"A cat that is laying next to a keyboard and monitor."
] | {
"subject1_rep0": 14,
"subject1_rep1": 1428,
"subject1_rep2": 22745,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
... | ||
0 | 14 | 0 | 35,405 | subj01 | false | false | false | train2017 | 18,214 | {
"area": [
4659,
4405,
154,
4609,
3143,
319,
726,
60,
205,
96,
603,
225,
245,
33,
123,
72,
61,
101,
60,
17,
18,
55,
10364
],
"bbox": [
[
78.42,
367.79,
172.91000000000003,
50.96999999999997
... | [
"the image of a street with parked cars on the side walk and there are advertsment signs",
"A street filled with different signs, cars, cyclist, and horse.",
"A donkey crossing an intersection of a shopping area, in front of cars and bike riders.",
"Picture of some cars driving in the street.",
"The tip of ... | {
"subject1_rep0": 15,
"subject1_rep1": 28372,
"subject1_rep2": 29752,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
... | ||
0 | 15 | 0 | 21,690 | subj01 | false | false | false | train2017 | 581,042 | {
"area": [
10753,
855
],
"bbox": [
[
167.37,
158.32,
116.82999999999998,
116.84000000000003
],
[
151.02,
185.84,
14.399999999999977,
91.22
]
],
"category": [
"clock",
"clock"
],
"iscrowd": [
0,
0
],
"segmentation"... | [
"A large golden clock in the middle of a station",
"Large gold clock under an American flag in a public place",
"A golden clock at an information boot backdropped by an American flag.",
"A clock in train station below a flag.",
"There is a large American flag hanging above a clock. "
] | {
"subject1_rep0": 16,
"subject1_rep1": 13219,
"subject1_rep2": 17611,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
... | ||
0 | 16 | 0 | 28,278 | subj01 | false | false | false | train2017 | 81,819 | {
"area": [
8563,
5030,
9289,
423,
388,
597
],
"bbox": [
[
58.22,
246.27,
324.07000000000005,
93.00999999999996
],
[
2.55,
148.78,
410.03999999999996,
36.93000000000001
],
[
172.17,
236.83,
93.46000000000... | [
"A man standing on a sidewalk in front of an easel with a hat on. ",
"A man standing on the sidewalk in a hat painting. ",
"A man on the sidewalk on a sunny day ",
"a man that is standing in front of a road",
"A man is standing in front of an easel making a painting."
] | {
"subject1_rep0": 17,
"subject1_rep1": 46,
"subject1_rep2": 14601,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
"s... | ||
0 | 17 | 0 | 10,459 | subj01 | false | false | false | train2017 | 547,222 | {
"area": [
1201,
69339
],
"bbox": [
[
200.6,
312.4,
65.1,
96.91000000000003
],
[
0.18,
122.6,
424.76,
302.4
]
],
"category": [
"toothbrush",
"person"
],
"iscrowd": [
0,
0
],
"segmentation": [
{
"counts":... | [
"a boy taking a selfie in a mirror while brushing his teeth ",
"A person vigorously brushing their teeth and taking a photo.",
"A man that is brushing his teeth in the mirror.",
"Someone in brushing their teeth while taking a mirror selfie. ",
"a woman taking a picture of herself bushing her teeth"
] | {
"subject1_rep0": 18,
"subject1_rep1": 18663,
"subject1_rep2": 24539,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
... | ||
0 | 18 | 0 | 2,293 | subj01 | false | false | false | val2017 | 88,269 | {
"area": [
31744,
14888,
5685,
92837
],
"bbox": [
[
1.85,
272.46,
304.71999999999997,
152.54000000000002
],
[
275.85,
87.87,
149.14999999999998,
201.51
],
[
230.01,
12.41,
194.99,
116.52000000000001
],... | [
"A plate holding a grilled cheese sandwich and bowl of soup.",
"A bowl of tomato soup is next to grilled cheese.",
"a plate with a bowl of tomato soup and two halves of a grilled cheese sandwich.",
"A bowl of tomato soup and two pieces of grilled cheese.",
"a bowl of tomato soup with two grilled cheese san... | {
"subject1_rep0": 19,
"subject1_rep1": 124,
"subject1_rep2": 21988,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
"... | ||
0 | 19 | 0 | 44,325 | subj01 | false | false | false | train2017 | 406,295 | {
"area": [
17002,
2984,
3987,
15626,
46107
],
"bbox": [
[
241.71,
101.13,
83.46000000000001,
290.48
],
[
146.46,
295.42,
34.45999999999998,
109.84999999999997
],
[
129.58,
194.75,
95.94999999999999,
45... | [
"A couple bottles of wine and a vase of flowers.",
"A vase with flowers, greeting cards and two bottles of wine sitting on a table.",
"Flowers are sitting in the vase next to wine bottles",
"The two bottles of wine are next to greeting cards and a flower vase. ",
"A flower pot with cards, and wine in top of... | {
"subject1_rep0": 20,
"subject1_rep1": 1553,
"subject1_rep2": 2892,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
"... | ||
0 | 20 | 0 | 38,218 | subj01 | false | false | false | train2017 | 120,416 | {
"area": [
26953,
1146,
27
],
"bbox": [
[
107.19,
41.06,
313.25,
301.8
],
[
107.16,
126.69,
38.640000000000015,
52.03
],
[
402.5,
151.82,
10.04000000000002,
9.530000000000001
]
],
"category": [
"pe... | [
"a man plays baseball on a field with grass.",
"a pitcher throwing a pitch from the mound",
"The baseball pitcher in the red cap is starting to throw a baseball.",
"A baseball player throwing a pitch onto the field",
"A baseball player pitching a baseball on top of a field."
] | {
"subject1_rep0": 21,
"subject1_rep1": 192,
"subject1_rep2": 29743,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
"... | ||
0 | 21 | 0 | 30,032 | subj01 | false | false | false | train2017 | 151,345 | {
"area": [
56914
],
"bbox": [
[
0,
109,
425,
316
]
],
"category": [
"motorcycle"
],
"iscrowd": [
0
],
"segmentation": [
{
"counts": null,
"poly": [
[
425,
370.69,
425,
375.46,
425,
... | [
"A motorcycle parked on the side of the road with a bridge in the background.",
"Black motor cycle parked on the side of the road with the view of a bridge in the background.",
"A motorcycle in front of a bridge and mountains. ",
"A motorcycle overlooking the Golden Gate Bridge in San Francisco.",
"a motorc... | {
"subject1_rep0": 22,
"subject1_rep1": 11379,
"subject1_rep2": 11407,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
... | ||
0 | 22 | 0 | 65,255 | subj01 | false | false | false | train2017 | 225,755 | {
"area": [
40492,
44438,
3102,
3289
],
"bbox": [
[
223.67,
32.62,
163.62000000000003,
392.38
],
[
34.96,
42.3,
200.7,
382.7
],
[
151.51,
281.73,
67.45000000000002,
69.16999999999996
],
[
255.... | [
"A couple of men standing next to each other holding catchers mitts.",
"Twp professional baseball players running side by side.",
"Two baseball players wearing gloves on a field.",
"two members of the Mariners walking next each other and talking",
"The two baseball players are talking with each other"
] | {
"subject1_rep0": 23,
"subject1_rep1": 220,
"subject1_rep2": 24517,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
"... | ||
0 | 23 | 0 | 64,919 | subj01 | false | false | false | train2017 | 486,655 | {
"area": [
51743
],
"bbox": [
[
179.56,
33.42,
245.44,
390.62
]
],
"category": [
"giraffe"
],
"iscrowd": [
0
],
"segmentation": [
{
"counts": null,
"poly": [
[
425,
413.54,
395.4,
316.12,
... | [
"A giraffe with its eyes half closed near a tree branch.",
"A giraffe next to a bare tree branch.",
"A giraffe head sitting next to a branch.",
"a giraffe standing next to a small branch",
"A giraffe eating near a tree in the sunlight. "
] | {
"subject1_rep0": 24,
"subject1_rep1": 421,
"subject1_rep2": 29715,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
"... | ||
0 | 24 | 0 | 12,469 | subj01 | false | false | false | train2017 | 290,969 | {
"area": [
16779,
4896
],
"bbox": [
[
97.11,
0,
272.77,
244.21
],
[
143.67,
0.71,
111.55000000000001,
282.25
]
],
"category": [
"person",
"skateboard"
],
"iscrowd": [
0,
0
],
"segmentation": [
{
"counts"... | [
"A girl's bare feet on top of a skateboard.",
"A woman's bare feet are standing on a skateboard.",
"A person in bear feet standing on a skateboard. ",
"A picture of some peoples feet and shoes.",
"A pair of bare feet standing on a skateboard."
] | {
"subject1_rep0": 25,
"subject1_rep1": 10792,
"subject1_rep2": 29675,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
... | ||
0 | 25 | 0 | 43,078 | subj01 | false | false | false | train2017 | 401,403 | {
"area": [
21099,
16544,
50
],
"bbox": [
[
168.08,
150.32,
204.9,
274.68
],
[
109.52,
217.84,
247.04000000000002,
207.16
],
[
254.66,
247.87,
13.340000000000003,
6.8799999999999955
]
],
"category": [
... | [
"This man is sitting on a bench next to a tree.",
"a person sitting on a bench under a tree",
"A man sitting on a park bench looking at something he's holding.",
"A man sits on a green bench situated on a forest path.",
"A young man sitting on a bench working on a laptop."
] | {
"subject1_rep0": 26,
"subject1_rep1": 5962,
"subject1_rep2": 22421,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
... | ||
0 | 26 | 0 | 8,926 | subj01 | false | false | false | train2017 | 542,630 | {
"area": [
7,
54,
281,
947,
309,
127,
21,
9,
6,
717,
261,
672,
484,
248,
21,
84,
518,
39232
],
"bbox": [
[
186.26,
334.58,
3.060000000000002,
2.990000000000009
],
[
192.27,
333.61,
17.419... | [
"A crowd watching a baseball game at a stadium.",
"Baseball players are playing on the field at a baseball game. ",
"an over head view of some people playing baseball ",
"A group of people that are on a baseball field.",
"Group of people playing a game of baseball on a sports field. "
] | {
"subject1_rep0": 27,
"subject1_rep1": 9404,
"subject1_rep2": 13628,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
... | ||
0 | 27 | 0 | 9,860 | subj01 | false | false | false | train2017 | 21,079 | {
"area": [
369,
100,
15620,
3321,
12436,
9055,
573,
665,
201,
205,
324,
208,
461,
329,
590,
265,
164,
124,
86,
44,
2343,
857,
254,
355,
1143,
225,
59,
2048,
2085,
3984,
2743,
583,
589,
... | [
"A group is gathered at a long table for a meal.",
"A group of people are sitting around a table eating.",
"Group of people eating on a long table. ",
"A long table filled with people eating a meal",
"a bunch of peope at a really long tasble eating"
] | {
"subject1_rep0": 28,
"subject1_rep1": 7105,
"subject1_rep2": 18823,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
... | ||
0 | 28 | 0 | 48,617 | subj01 | false | true | true | train2017 | 161,062 | {
"area": [
28415
],
"bbox": [
[
26.89,
118.47,
398.11,
177.35999999999999
]
],
"category": [
"airplane"
],
"iscrowd": [
0
],
"segmentation": [
{
"counts": null,
"poly": [
[
425,
128.45,
425,
12... | [
"A plane on the runway under cloudy skies.",
"Airplane boards on an extremely dark and gloomy day.",
"There is a plane pulled into a port under the clouds.",
"An American Airlines airplane is preparing for take off. ",
"An airplane sits at the airport waiting to be loaded."
] | {
"subject1_rep0": 29,
"subject1_rep1": 8174,
"subject1_rep2": 28951,
"subject2_rep0": 29,
"subject2_rep1": 8174,
"subject2_rep2": 28951,
"subject3_rep0": 29,
"subject3_rep1": 8174,
"subject3_rep2": 28951,
"subject4_rep0": 29,
"subject4_rep1": 8174,
"subject4_rep2": 28951,
"subject5_rep0": 29,... | ||
0 | 29 | 0 | 18,699 | subj01 | false | false | false | train2017 | 47,654 | {
"area": [
21303,
487,
53785,
9109,
3955,
345,
1488,
199,
38023,
663,
2673,
1498,
6810,
7859
],
"bbox": [
[
5.58,
234.45,
178.63,
190.55
],
[
314.45,
57.23,
32.579999999999984,
41.06000000000001
... | [
"a couple of kids are holding stuffed animals",
"A man kneeling down next to two girls.",
"Guy posing for picture with two cute little girls holding stuffed animals",
"The young father is posing with his two children.",
"a father and his two daughters which are holding stuffed animals"
] | {
"subject1_rep0": 30,
"subject1_rep1": 9837,
"subject1_rep2": 27870,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
... | ||
0 | 30 | 0 | 52,707 | subj01 | false | false | false | train2017 | 177,314 | {
"area": [
97006,
24820,
10483
],
"bbox": [
[
17.19,
78.02,
407.81,
338.09000000000003
],
[
0,
0,
196.74,
175.73
],
[
0.99,
0,
142.32999999999998,
99.63
]
],
"category": [
"bowl",
"bowl",
"... | [
"A white bowl filled with mushroom and veggie soup.",
"a bowl full of vegetables and potatoes with broth",
"Bowl of stew with potatoes, carrots, dark broth.",
"a bunch of food in a small white bowl",
"A bunch of vegetables that are sitting in a bowl."
] | {
"subject1_rep0": 31,
"subject1_rep1": 23622,
"subject1_rep2": 29256,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
... | ||
0 | 31 | 0 | 65,060 | subj01 | false | false | false | train2017 | 487,222 | {
"area": [
705
],
"bbox": [
[
77.45,
98.8,
75.21999999999998,
24.960000000000008
]
],
"category": [
"airplane"
],
"iscrowd": [
0
],
"segmentation": [
{
"counts": null,
"poly": [
[
152.67,
114.09,
145.18,... | [
"there's and airplane in the sky flying over some trees",
"a large plane is flying over a crowd of trees",
"A aeroplane soaring high in the sky above the trees.",
"A passenger plane flies in the sky over a forest. ",
"An airplane is seen flying over several trees."
] | {
"subject1_rep0": 32,
"subject1_rep1": 7491,
"subject1_rep2": 29912,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
... | ||
0 | 32 | 0 | 47,923 | subj01 | false | false | false | train2017 | 420,485 | {
"area": [
22654,
373,
530,
245,
96
],
"bbox": [
[
139.78,
214.73,
148.67999999999998,
188.07000000000002
],
[
353.41,
273.03,
36.90999999999997,
14.420000000000016
],
[
308.16,
267.2,
40.5,
17.1500000... | [
"a black train is coming down the tracks",
"A steam train following a track that runs alongside a highway.",
"Train with smoke emerging from engine travelling near road.",
"a black and red trains engine grass and trees",
"A train speeds down the tracks as smoke shoots out the top."
] | {
"subject1_rep0": 33,
"subject1_rep1": 21815,
"subject1_rep2": 22080,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
... | ||
0 | 33 | 0 | 54,482 | subj01 | false | false | false | train2017 | 184,143 | {
"area": [
319,
1197
],
"bbox": [
[
241.55,
219.19,
47.26999999999998,
20.099999999999994
],
[
173.99,
198.45,
41.73999999999998,
43.48000000000002
]
],
"category": [
"surfboard",
"person"
],
"iscrowd": [
0,
0
],
... | [
"A person on a surf board in the water",
"A surf boarder waiting in the water for a wave.",
"A person riding a board in the middle of the ocean.",
"a surfer is out at sea waiting for a wave to come",
"A person is floating on a surf board in the middle of the ocean."
] | {
"subject1_rep0": 34,
"subject1_rep1": 7822,
"subject1_rep2": 29961,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
... | ||
0 | 34 | 0 | 58,816 | subj01 | false | false | false | train2017 | 200,945 | {
"area": [
113837
],
"bbox": [
[
1.34,
43.93,
419.99,
381.07
]
],
"category": [
"hot dog"
],
"iscrowd": [
0
],
"segmentation": [
{
"counts": null,
"poly": [
[
4,
326.59,
21.33,
378.59,
... | [
"A hot dog and a pickle on a tray.",
"A hotdog that is sitting in a bun on a paper.",
"A hot dog with pickles and tomatoes. ",
"there is a sandwich with many different foods in it",
"This is a hot dog with tomato, mustard, onion, relish, and pickle on it."
] | {
"subject1_rep0": 35,
"subject1_rep1": 14108,
"subject1_rep2": 29640,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
... | ||
0 | 35 | 0 | 44,980 | subj01 | false | true | true | train2017 | 408,965 | {
"area": [
5516
],
"bbox": [
[
0,
106.38,
185.38,
69.47
]
],
"category": [
"airplane"
],
"iscrowd": [
0
],
"segmentation": [
{
"counts": null,
"poly": [
[
0,
112.28,
0,
130.63,
28.09,... | [
"A passenger jet coming in for a landing over a big city.",
"An aeroplane flying in the sky over the buildings at sunset.",
"An airplane flying in the air above a city.",
"an airplane flying about many tall buildings and cars ",
"A blue jet airliner flying over a city."
] | {
"subject1_rep0": 36,
"subject1_rep1": 190,
"subject1_rep2": 16280,
"subject2_rep0": 36,
"subject2_rep1": 190,
"subject2_rep2": 16280,
"subject3_rep0": 36,
"subject3_rep1": 190,
"subject3_rep2": 16280,
"subject4_rep0": 36,
"subject4_rep1": 190,
"subject4_rep2": 16280,
"subject5_rep0": 36,
"... | ||
0 | 36 | 0 | 12,258 | subj01 | false | false | false | train2017 | 552,461 | {
"area": [
4372,
1094,
108,
137,
208,
159,
1259,
97498,
857,
104,
556,
113,
109,
205
],
"bbox": [
[
133.71,
298.77,
59.20999999999998,
122.25
],
[
340.2,
270.49,
28.930000000000007,
44.990000000000... | [
"A man wearing glasses and a gray jacket and neck tie.",
"The man in the suit poses in front of a dining room.",
"A man is wearing glasses a suit and a tie.",
"A man wearing a suit, tie, and glasses stands near a nice dining room.",
"A man wearing eye glasses is staring at the camera in front of a room."
] | {
"subject1_rep0": 37,
"subject1_rep1": 7199,
"subject1_rep2": 19272,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
... | ||
0 | 37 | 0 | 2,548 | subj01 | false | false | false | val2017 | 529,568 | {
"area": [
5378,
17571,
253,
229,
349,
212,
100,
185,
3884,
8124,
268,
1024,
827,
265,
218,
177,
202,
144,
229,
162,
27,
71,
330,
95,
120,
56,
118,
165,
35,
126,
7,
104,
450,
565,
... | [
"A big fancy glass chandelier hanging in a small kitchen.",
"A kitchen area with a counter, shelves and a television.",
"a country kitchen has a blue, wooden table, open shelves and white counters and a white sink.",
"A nice kitchen has a large crystal chandelier.",
"The old kitchen and the table in it are ... | {
"subject1_rep0": 38,
"subject1_rep1": 6795,
"subject1_rep2": 20027,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
... | ||
0 | 38 | 0 | 11,018 | subj01 | false | false | false | train2017 | 548,913 | {
"area": [
24728,
4419,
28488
],
"bbox": [
[
4.77,
161.15,
140.07,
259.20000000000005
],
[
121.29,
232.07,
119.37999999999998,
69.73000000000002
],
[
225.34,
168.38,
199.66,
168.29000000000002
]
],
"ca... | [
"Person cutting a piece of cake off of a small white plate. ",
"A close up of a person cutting in to a piece of cake.",
"a man holding paper plate with a slice of cake",
"A person loading a bite of cake onto a fork.",
"Closeups of hands holding a plate of cake."
] | {
"subject1_rep0": 39,
"subject1_rep1": 610,
"subject1_rep2": 12339,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
"... | ||
0 | 39 | 0 | 23,358 | subj01 | false | false | false | train2017 | 325,211 | {
"area": [
37013,
126297,
52251
],
"bbox": [
[
2.59,
215.73,
422.41,
113.97999999999999
],
[
0,
0,
425,
336.49
],
[
1.5,
288.13,
423.5,
136.87
]
],
"category": [
"keyboard",
"laptop",
"book... | [
"I really cant see this image very well.",
"An open book in front of a keyboard and monitor ",
"A book is open in front of a keyboard. ",
"very blurry picture of what looks like 2 open books",
"Someone is taking notes or an open book test on the computer."
] | {
"subject1_rep0": 40,
"subject1_rep1": 5637,
"subject1_rep2": 20600,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
... | ||
0 | 40 | 0 | 39,386 | subj01 | false | false | false | train2017 | 387,256 | {
"area": [
44831,
43869,
4301,
15928,
340
],
"bbox": [
[
201.48,
30.63,
223.52,
389.58
],
[
34.92,
50.48,
210.12,
369.85999999999996
],
[
60.39,
217.47,
80.22000000000001,
160.30999999999997
],
... | [
"two women posing for a picture while holding teddy bears ",
"Two women who are each holding a teddy bear.",
"A couple of women standing in a hall with stuffed bears.",
"Two women who are holding stuffed white bears.",
"two woman in a hallway holding teddy bears"
] | {
"subject1_rep0": 41,
"subject1_rep1": 10634,
"subject1_rep2": 29932,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
... | ||
0 | 41 | 0 | 17,681 | subj01 | false | false | false | train2017 | 306,688 | {
"area": [
170135,
1819,
945,
432,
375,
406,
68422,
518,
805,
1006,
781,
414,
535,
4235,
618
],
"bbox": [
[
0,
0,
425,
419.28
],
[
205.79,
27.75,
216.95000000000002,
41.68000000000001
],
... | [
"A white pizza sliced in on a board along with two plates, utensils and pink napkins.",
"A small personal sized pizza with carrots and peas. ",
"Pizza is on a cutting board with table settings around it.",
"A nontraditional pizza is topped with peas and carrots.",
"A pizza with green peas and carrots on top... | {
"subject1_rep0": 42,
"subject1_rep1": 230,
"subject1_rep2": 29848,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
"... | ||
0 | 42 | 0 | 61,819 | subj01 | false | false | false | train2017 | 212,479 | {
"area": [
48369,
568
],
"bbox": [
[
0,
97.35,
425,
261.19000000000005
],
[
315.79,
181.04,
38.22999999999996,
25.400000000000006
]
],
"category": [
"airplane",
"person"
],
"iscrowd": [
0,
0
],
"segmentation": [
... | [
"Military aircraft waiting for clearance to take off on the runway.",
"A pilot in a fighter jet sits on a runway.",
"A military jet sitting on a runway in the sun",
"A stealthy plane sits on a lot surrounded by grass.",
"A fighter jet waiting to take off on a runway."
] | {
"subject1_rep0": 43,
"subject1_rep1": 8730,
"subject1_rep2": 9105,
"subject2_rep0": 0,
"subject2_rep1": 0,
"subject2_rep2": 0,
"subject3_rep0": 0,
"subject3_rep1": 0,
"subject3_rep2": 0,
"subject4_rep0": 0,
"subject4_rep1": 0,
"subject4_rep2": 0,
"subject5_rep0": 0,
"subject5_rep1": 0,
"... |
A Hugging Face dataset of pre-processed brain activity flat maps from the Natural Scenes Dataset, constrained to a visual cortex region of interest and rendered as PNG images.
Load the dataset from the Hugging Face Hub:
from datasets import load_dataset
dataset = load_dataset("clane9/NSD-Flat", split="train")
Run download_data.sh to download the required source data:
bash download_data.sh
Run convert_nsd_annotations.py to crop and reorganize the COCO annotations for NSD.
python convert_nsd_annotations.py
Run generate_dataset.py to generate the Hugging Face dataset in Arrow format.
python generate_dataset.py --img_size 256 --workers 8
If you find this dataset useful, please consider citing:
@article{allen2022massive,
title = {A massive 7T fMRI dataset to bridge cognitive neuroscience and artificial intelligence},
author = {Allen, Emily J and St-Yves, Ghislain and Wu, Yihan and Breedlove, Jesse L and Prince, Jacob S and Dowdle, Logan T and Nau, Matthias and Caron, Brad and Pestilli, Franco and Charest, Ian and others},
journal = {Nature neuroscience},
volume = {25},
number = {1},
pages = {116--126},
year = {2022},
publisher = {Nature Publishing Group US New York}
}
@misc{lane2023nsdflat,
author = {Connor Lane},
title = {NSD-Flat: Pre-processed brain activity flat maps from the Natural Scenes Dataset},
howpublished = {\url{https://huggingface.co/datasets/clane9/NSD-Flat}},
year = {2023},
}
Usage of this dataset constitutes agreement to the NSD Terms and Conditions.