<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic Get multiple JSON from a bucket using AWS API  service and convert/save to single CSV in Python Questions</title>
    <link>https://community.esri.com/t5/python-questions/get-multiple-json-from-a-bucket-using-aws-api/m-p/1169545#M64447</link>
    <description>&lt;P&gt;I need assistance with my Python script.&lt;/P&gt;&lt;P&gt;So the code below uses the Boto3 library to get a JSON file from the AWS API and converts/saves it to a CSV.&lt;/P&gt;&lt;P&gt;But what I need help with is getting multiple JSON files and converting/saving them all to a single CSV file, I've achieved this in the past (see bottom block code below) but I'm unsure how to do this with this particular API AWS script.&amp;nbsp;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Note that I will be using the script to get more than 100 files&lt;/P&gt;&lt;P&gt;Can anyone help? View the code below.&amp;nbsp;&lt;/P&gt;&lt;P&gt;So the first file is '&lt;STRONG&gt;data/JSON_GetAll_page_1.txt&lt;/STRONG&gt;', but I need to get:&lt;/P&gt;&lt;P&gt;&lt;STRONG&gt;data/JSON_GetAll_page_2.txt&lt;/STRONG&gt;&lt;/P&gt;&lt;P&gt;&lt;STRONG&gt;data/JSON_GetAll_page_3.txt&lt;/STRONG&gt;&lt;/P&gt;&lt;P&gt;&lt;STRONG&gt;data/JSON_GetAll_page_4.txt&lt;/STRONG&gt;&lt;/P&gt;&lt;P&gt;&lt;STRONG&gt;data/JSON_GetAll_page_5.txt&lt;/STRONG&gt;&lt;/P&gt;&lt;P&gt;View code block 5.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;LI-CODE lang="python"&gt;import boto3

client = boto3.resource('s3',
                        aws_access_key_id = '',
                        aws_secret_access_key = '',
                        region_name = ''
)

import os
data_path = "/saved_csvfile/"
import json

content_object = client.Object('bucket', 'data/JSON_GetAll_page_1.txt')
file_content = content_object.get()['Body'].read().decode('utf-8')
json_content = json.loads(file_content)
print(json_content['payload'])
import pandas as pd
df = pd.DataFrame(json_content['payload'])
print('Converting from JSON to CSV file ...')
for i in range(2,6):
    content_object = client.Object('bucket', 'data/JSON_GetAll_page_{i}.txt')
    file_content = content_object.get()['Body'].read().decode('utf-8')
    new_df = pd.DataFrame(json_content['payload'])

    df = pd.concat([df, new_df])
print('Successfully Converted')
df.to_csv('JSON_GetAll_page_1-5.csv',index=False)
df.to_csv(os.path.join(data_path, 'JSON_GetAll_page_1-5.csv'))
print('CSV Saved!')&lt;/LI-CODE&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;STRONG&gt;I have done this in the past with this script:&lt;/STRONG&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;LI-CODE lang="python"&gt;from arcgis.gis import GIS
import pandas as pd
import requests
import os

gis = GIS("home")
url = 'https://()JSON_GetAll_page_1.txt'
data_path = "/arcgis/home/data/"
res = requests.get(url)

df = pd.DataFrame(res.json()['payload'])
print('Converting from JSON to CSV file ...')

for i in range(2,3):
    # res = requests.get(f'https://()JSON_GetAll_page_{i}.txt')
    new_df = pd.DataFrame(res.json()['payload'])

    df = pd.concat([df, new_df])
print('Successfully Converted')

df.to_csv('JSON_GetAll_page_1.csv',index=False)
df.to_csv(os.path.join(data_path, 'JSON_GetAll_page_1.csv'))
print('CSV Saved!')&lt;/LI-CODE&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
    <pubDate>Sat, 30 Apr 2022 04:14:22 GMT</pubDate>
    <dc:creator>ChristopherCharles-Noriega</dc:creator>
    <dc:date>2022-04-30T04:14:22Z</dc:date>
    <item>
      <title>Get multiple JSON from a bucket using AWS API  service and convert/save to single CSV</title>
      <link>https://community.esri.com/t5/python-questions/get-multiple-json-from-a-bucket-using-aws-api/m-p/1169545#M64447</link>
      <description>&lt;P&gt;I need assistance with my Python script.&lt;/P&gt;&lt;P&gt;So the code below uses the Boto3 library to get a JSON file from the AWS API and converts/saves it to a CSV.&lt;/P&gt;&lt;P&gt;But what I need help with is getting multiple JSON files and converting/saving them all to a single CSV file, I've achieved this in the past (see bottom block code below) but I'm unsure how to do this with this particular API AWS script.&amp;nbsp;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Note that I will be using the script to get more than 100 files&lt;/P&gt;&lt;P&gt;Can anyone help? View the code below.&amp;nbsp;&lt;/P&gt;&lt;P&gt;So the first file is '&lt;STRONG&gt;data/JSON_GetAll_page_1.txt&lt;/STRONG&gt;', but I need to get:&lt;/P&gt;&lt;P&gt;&lt;STRONG&gt;data/JSON_GetAll_page_2.txt&lt;/STRONG&gt;&lt;/P&gt;&lt;P&gt;&lt;STRONG&gt;data/JSON_GetAll_page_3.txt&lt;/STRONG&gt;&lt;/P&gt;&lt;P&gt;&lt;STRONG&gt;data/JSON_GetAll_page_4.txt&lt;/STRONG&gt;&lt;/P&gt;&lt;P&gt;&lt;STRONG&gt;data/JSON_GetAll_page_5.txt&lt;/STRONG&gt;&lt;/P&gt;&lt;P&gt;View code block 5.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;LI-CODE lang="python"&gt;import boto3

client = boto3.resource('s3',
                        aws_access_key_id = '',
                        aws_secret_access_key = '',
                        region_name = ''
)

import os
data_path = "/saved_csvfile/"
import json

content_object = client.Object('bucket', 'data/JSON_GetAll_page_1.txt')
file_content = content_object.get()['Body'].read().decode('utf-8')
json_content = json.loads(file_content)
print(json_content['payload'])
import pandas as pd
df = pd.DataFrame(json_content['payload'])
print('Converting from JSON to CSV file ...')
for i in range(2,6):
    content_object = client.Object('bucket', 'data/JSON_GetAll_page_{i}.txt')
    file_content = content_object.get()['Body'].read().decode('utf-8')
    new_df = pd.DataFrame(json_content['payload'])

    df = pd.concat([df, new_df])
print('Successfully Converted')
df.to_csv('JSON_GetAll_page_1-5.csv',index=False)
df.to_csv(os.path.join(data_path, 'JSON_GetAll_page_1-5.csv'))
print('CSV Saved!')&lt;/LI-CODE&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;STRONG&gt;I have done this in the past with this script:&lt;/STRONG&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;LI-CODE lang="python"&gt;from arcgis.gis import GIS
import pandas as pd
import requests
import os

gis = GIS("home")
url = 'https://()JSON_GetAll_page_1.txt'
data_path = "/arcgis/home/data/"
res = requests.get(url)

df = pd.DataFrame(res.json()['payload'])
print('Converting from JSON to CSV file ...')

for i in range(2,3):
    # res = requests.get(f'https://()JSON_GetAll_page_{i}.txt')
    new_df = pd.DataFrame(res.json()['payload'])

    df = pd.concat([df, new_df])
print('Successfully Converted')

df.to_csv('JSON_GetAll_page_1.csv',index=False)
df.to_csv(os.path.join(data_path, 'JSON_GetAll_page_1.csv'))
print('CSV Saved!')&lt;/LI-CODE&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Sat, 30 Apr 2022 04:14:22 GMT</pubDate>
      <guid>https://community.esri.com/t5/python-questions/get-multiple-json-from-a-bucket-using-aws-api/m-p/1169545#M64447</guid>
      <dc:creator>ChristopherCharles-Noriega</dc:creator>
      <dc:date>2022-04-30T04:14:22Z</dc:date>
    </item>
  </channel>
</rss>

