Here you’ll find information about APIs, scripting data downloads, and other developer resources.

Data Download Scripts

 
Download Individual Shapefile/Table from Deschutes County Data Portal (Python 2.x)
  1. Go to the Deschutes County Data Portal to determine the URL of the data you want to download
  2. Find and click the dataset you want to download
  3. Copy the URL. It should look similar to this: http://data.deschutes.org/datasets/28019431cced49849cb4b1793b075bf1_2
  4. Replace the URL in the following Python code block with the one you just copied and add a ".zip" (Shapefile) or ".csv" (table) file extension to the end
    import urllib2, zipfile, os, cgi, time
    
    #-- Parameters
    dataFolder = "C:\\temp\\"
    url = "http://data.deschutes.org/datasets/a7653d7d5c7649cbb9dbe032b021afd9_1.zip"
    
    #-- Code
    def downloadFile(data):
        response = urllib2.urlopen(data)
        if "application/json" in response.headers.get('Content-Type', ''):
            print url + " is being cached on the server.  Script will try again in 3 seconds."
            time.sleep(3)
            downloadFile(data)
        else:
            headerFileName = cgi.parse_header(response.headers.get('Content-Disposition', ''))[1]['filename']
            localFile = dataFolder + headerFileName
            print "Downloading file " + url +" to " + localFile
    
            connection = open(localFile,'wb')
            connection.write(response.read())
            connection.close()
    
            if localFile.lower().endswith(('.zip')):
                print "Unzipping file " + localFile
                unZipper = open(localFile, 'rb')
                zippedFiles = zipfile.ZipFile(unZipper)
                for name in zippedFiles.namelist():
                    print "Extracting " + name
                    zippedFiles.extract(name, dataFolder)
                unZipper.close()
                os.remove(localFile)
    
    #-- Start the download
    downloadFile(url)
    
  5. Optionally, set the output location to somewhere other than C:\temp and use the ArcPy Python library to import/copy the shapefile to a geodatabase.
Download Individual Shapefile/Table from Deschutes County Data Portal (Python 3.x)
  1. Go to the Deschutes County Data Portal to determine the URL of the data you want to download
  2. Find and click the dataset you want to download
  3. Copy the URL. It should look similar to this: http://data.deschutes.org/datasets/28019431cced49849cb4b1793b075bf1_2
  4. Replace the URL in the following Python code block with the one you just copied and add a ".zip" (Shapefile) or ".csv" (table) file extension to the end
    import urllib.request, zipfile, os, time
    from email.message import EmailMessage

    #-- Parameters
    dataFolder = "C:\\temp\\"
    url = "http://data.deschutes.org/datasets/a7653d7d5c7649cbb9dbe032b021afd9_1.zip"

    #-- Code
    def _filenameFromHeader(contentDisposition):
        """Return the filename parameter of a Content-Disposition header.

        cgi.parse_header was deprecated in Python 3.11 and removed in 3.13;
        email.message.EmailMessage performs the same header-parameter parsing.
        """
        msg = EmailMessage()
        msg['Content-Disposition'] = contentDisposition
        return msg.get_filename()

    def downloadFile(data, maxRetries=20):
        """Download the dataset at URL *data* into dataFolder.

        The portal answers with a JSON response while it is still caching
        the export, so poll up to maxRetries times with a 3-second pause
        (a bounded loop instead of the original unbounded recursion, which
        could exhaust the recursion limit). Zip archives are extracted
        into dataFolder and the archive is then deleted.
        """
        for attempt in range(maxRetries):
            response = urllib.request.urlopen(data)
            if "application/json" in response.headers.get('Content-Type', ''):
                #-- Export not ready yet; wait and re-request.
                print(data + " is being cached on the server.  Script will try again in 3 seconds.")
                time.sleep(3)
                continue

            #-- The portal supplies the real file name in the Content-Disposition header.
            headerFileName = _filenameFromHeader(response.headers.get('Content-Disposition', ''))
            localFile = dataFolder + headerFileName
            print("Downloading file " + data + " to " + localFile)

            #-- 'with' guarantees the handle is closed even if the write fails.
            with open(localFile, 'wb') as connection:
                connection.write(response.read())

            if localFile.lower().endswith('.zip'):
                print("Unzipping file " + localFile)
                with zipfile.ZipFile(localFile) as zippedFiles:
                    for name in zippedFiles.namelist():
                        print("Extracting " + name)
                        zippedFiles.extract(name, dataFolder)
                os.remove(localFile)
            return
        print("Giving up on " + data + " after " + str(maxRetries) + " attempts.")

    #-- Start the download
    downloadFile(url)
    
  5. Optionally, set the output location to somewhere other than C:\temp and use the ArcPy Python library to import/copy the shapefile to a geodatabase.
Download Multiple Shapefiles/Tables from Deschutes County Data Portal (Python 2.x) into file geodatabase and re-project to state plane
  1. Go to the Deschutes County Data Portal to determine the URL of the data you want to download
  2. Find and click the dataset you want to download
  3. Copy the URL. It should look similar to this: http://data.deschutes.org/datasets/28019431cced49849cb4b1793b075bf1_2
  4. Add the URL you just copied to the urls parameter in the following Python code block, appending a ".zip" (Shapefile) or ".csv" (table) file extension to the end
    import urllib2, zipfile, os, arcpy, cgi
    
    #-- Parameters
    dataFolder = "C:\\temp\\"
    deschutes_FGDB = dataFolder + "Deschutes_FGDB.gdb"
    urls = ["http://data.deschutes.org/datasets/a7653d7d5c7649cbb9dbe032b021afd9_1.zip",
            "http://data.deschutes.org/datasets/d3b57b7fe394408d9695de2044c79d7b_8.csv"]
    
    #-- Code
    def downloadFiles(data):
        files = []
        unprocessedData = []
        for url in data:
            response = urllib2.urlopen(url)
            #-- Test to see if the response is json - indicating data are not yet cached on server.
            if "application/json" in response.headers.get('Content-Type', ''):
                print url + " is being cached on the server.  Script will try again."
                unprocessedData.append(url);
            else:
                headerFileName = cgi.parse_header(response.headers.get('Content-Disposition', ''))[1]['filename']
                localFile = dataFolder + headerFileName
                print "Downloading file " + url +" to " + localFile
    
                connection = open(localFile,'wb')
                connection.write(response.read())
                connection.close()
    
                if localFile.lower().endswith(('.zip')):
                    print "Unzipping file " + localFile
                    unZipper = open(localFile, 'rb')
                    zippedFiles = zipfile.ZipFile(unZipper)
                    for name in zippedFiles.namelist():
                        print "Extracting " + name
                        zippedFiles.extract(name, dataFolder)
                        if name.lower().endswith(('.shp')):
                            files.append(name)
                    unZipper.close()
                    os.remove(localFile)
    
                if localFile.lower().endswith(('.csv')):
                    files.append(headerFileName)
    
        # Create the File GDB if it does not exist
        if not arcpy.Exists(deschutes_FGDB):
            print "Creating " + deschutes_FGDB
            arcpy.CreateFileGDB_management(dataFolder, "Deschutes_FGDB", "10.0")
    
        for file in files:
            print "Importing " + file
            #-- Import as Shapefile
            if file.lower().endswith(('.shp')):
                shapefilePath = dataFolder + "\\" + file
                featurclass = deschutes_FGDB + "\\" + os.path.splitext(file)[0]
                if arcpy.Exists(featurclass):
                    arcpy.Delete_management(featurclass)
                arcpy.Project_management(shapefilePath, featurclass, "PROJCS['NAD_1983_StatePlane_Oregon_South_FIPS_3602_Feet_Intl',GEOGCS['GCS_North_American_1983',DATUM['D_North_American_1983',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Lambert_Conformal_Conic'],PARAMETER['False_Easting',4921259.842519685],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',-120.5],PARAMETER['Standard_Parallel_1',42.33333333333334],PARAMETER['Standard_Parallel_2',44.0],PARAMETER['Latitude_Of_Origin',41.66666666666666],UNIT['Foot',0.3048]]", "WGS_1984_(ITRF00)_To_NAD_1983", "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]]", "NO_PRESERVE_SHAPE", "")
                arcpy.Delete_management(shapefilePath)
    
            if file.lower().endswith(('.csv')):
                tablePath = dataFolder + "\\" + file
                featureTable = deschutes_FGDB + "\\" + os.path.splitext(file)[0]
                if arcpy.Exists(featureTable):
                    arcpy.Delete_management(featureTable)
                arcpy.CopyRows_management(tablePath, featureTable, "")
                arcpy.Delete_management(tablePath)
    
        #-- Try downloading any data that were not cached.
        if len(unprocessedData) > 0:
            downloadFiles(unprocessedData)
    
    #-- Start the download
    downloadFiles(urls)
    
  5. Optionally, set the output location to somewhere other than C:\temp.
Download Multiple Shapefiles/Tables from Deschutes County Data Portal (Python 3.x) into file geodatabase and re-project to state plane
  1. Go to the Deschutes County Data Portal to determine the URL of the data you want to download
  2. Find and click the dataset you want to download
  3. Copy the URL. It should look similar to this: http://data.deschutes.org/datasets/28019431cced49849cb4b1793b075bf1_2
  4. Add the URL you just copied to the urls parameter in the following Python code block, appending a ".zip" (Shapefile) or ".csv" (table) file extension to the end
    import urllib.request, zipfile, os, arcpy, time
    from email.message import EmailMessage

    #-- Parameters
    dataFolder = "C:\\temp\\"
    deschutes_FGDB = dataFolder + "Deschutes_FGDB.gdb"
    urls = ["http://data.deschutes.org/datasets/a7653d7d5c7649cbb9dbe032b021afd9_1.zip",
            "http://data.deschutes.org/datasets/d3b57b7fe394408d9695de2044c79d7b_8.csv"]

    #-- Code
    def _filenameFromHeader(contentDisposition):
        """Return the filename parameter of a Content-Disposition header.

        cgi.parse_header was deprecated in Python 3.11 and removed in 3.13;
        email.message.EmailMessage performs the same header-parameter parsing.
        """
        msg = EmailMessage()
        msg['Content-Disposition'] = contentDisposition
        return msg.get_filename()

    def downloadFiles(data, retriesLeft=20):
        """Download every URL in *data* into dataFolder, then load the
        results into the Deschutes_FGDB file geodatabase: shapefiles are
        re-projected to Oregon State Plane South (intl feet), CSVs are
        copied in as tables.

        URLs whose export the portal is still caching are retried after a
        3-second pause, at most retriesLeft more times (the original retried
        recursively with no delay and no bound, hammering the server).
        """
        files = []            #-- downloaded .shp / .csv names to import
        unprocessedData = []  #-- URLs the server has not finished caching yet
        for url in data:
            response = urllib.request.urlopen(url)
            #-- Test to see if the response is json - indicating data are not yet cached on server.
            if "application/json" in response.headers.get('Content-Type', ''):
                print(url + " is being cached on the server.  Script will try again.")
                unprocessedData.append(url)
            else:
                #-- The portal supplies the real file name in the Content-Disposition header.
                headerFileName = _filenameFromHeader(response.headers.get('Content-Disposition', ''))
                localFile = dataFolder + headerFileName
                print("Downloading file " + url + " to " + localFile)

                #-- 'with' guarantees the handle is closed even if the write fails.
                with open(localFile, 'wb') as connection:
                    connection.write(response.read())

                if localFile.lower().endswith('.zip'):
                    print("Unzipping file " + localFile)
                    with zipfile.ZipFile(localFile) as zippedFiles:
                        for name in zippedFiles.namelist():
                            print("Extracting " + name)
                            zippedFiles.extract(name, dataFolder)
                            if name.lower().endswith('.shp'):
                                files.append(name)
                    os.remove(localFile)

                if localFile.lower().endswith('.csv'):
                    files.append(headerFileName)

        #-- Create the File GDB if it does not exist
        if not arcpy.Exists(deschutes_FGDB):
            print("Creating " + deschutes_FGDB)
            arcpy.CreateFileGDB_management(dataFolder, "Deschutes_FGDB", "10.0")

        #-- 'dataFile' rather than 'file', which shadows the builtin.
        for dataFile in files:
            print("Importing " + dataFile)
            #-- Import as Shapefile, re-projecting to state plane
            if dataFile.lower().endswith('.shp'):
                shapefilePath = dataFolder + "\\" + dataFile
                featureClass = deschutes_FGDB + "\\" + os.path.splitext(dataFile)[0]
                if arcpy.Exists(featureClass):
                    arcpy.Delete_management(featureClass)
                arcpy.Project_management(shapefilePath, featureClass, "PROJCS['NAD_1983_StatePlane_Oregon_South_FIPS_3602_Feet_Intl',GEOGCS['GCS_North_American_1983',DATUM['D_North_American_1983',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Lambert_Conformal_Conic'],PARAMETER['False_Easting',4921259.842519685],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',-120.5],PARAMETER['Standard_Parallel_1',42.33333333333334],PARAMETER['Standard_Parallel_2',44.0],PARAMETER['Latitude_Of_Origin',41.66666666666666],UNIT['Foot',0.3048]]", "WGS_1984_(ITRF00)_To_NAD_1983", "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]]", "NO_PRESERVE_SHAPE", "")
                arcpy.Delete_management(shapefilePath)

            #-- Import as table
            if dataFile.lower().endswith('.csv'):
                tablePath = dataFolder + "\\" + dataFile
                featureTable = deschutes_FGDB + "\\" + os.path.splitext(dataFile)[0]
                if arcpy.Exists(featureTable):
                    arcpy.Delete_management(featureTable)
                arcpy.CopyRows_management(tablePath, featureTable, "")
                arcpy.Delete_management(tablePath)

        #-- Try downloading any data that were not cached, after a short
        #-- pause so the server is not hit in a tight loop.
        if len(unprocessedData) > 0:
            if retriesLeft > 0:
                time.sleep(3)
                downloadFiles(unprocessedData, retriesLeft - 1)
            else:
                print("Giving up on " + str(len(unprocessedData)) + " uncached URL(s).")

    #-- Start the download
    downloadFiles(urls)
    
  5. Optionally, set the output location to somewhere other than C:\temp.