- SSH into your remote server:
$ ssh username@serverip
- source a Python environment which has Jupyter installed;
- execute:
$ jupyter notebook --no-browser --port=8889 --NotebookApp.allow_origin="*" --ip=serverip --NotebookApp.token=''
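An optional alternative, if you prefer not to expose the notebook on the server's public IP with an empty token, is to keep Jupyter bound to localhost on the server and forward the port over SSH from your local machine (a standard SSH tunnel, shown here only as a sketch):
$ ssh -N -L 8889:localhost:8889 username@serverip
Then open http://localhost:8889 in your local browser.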
import pandas as pd
# define the link to the CSV file on Microsoft's GitHub repo
csv_path = r'https://raw.githubusercontent.com/microsoft/GlobalMLBuildingFootprints/main/dataset-links.csv'
# read the file
df = pd.read_csv(csv_path)
# check basic details
print('Columns in CSV:', df.columns)
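As a hypothetical next step, the link table can be filtered down to the region you need before downloading any tiles; the 'Location' column name used below is an assumption, so verify it against the printed column list first.
# hypothetical follow-up: filter the link table for one region
# (the 'Location' column name is an assumption -- check the printed columns)
country_links = df[df['Location'] == 'India']
print('Number of tile links for the region:', len(country_links))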
// define a function to pull Landsat data for a given footprint
var band_names = ['Blue', 'Green', 'Red', 'NIR', 'SWIR1', 'Thermal', 'SWIR2'];
function get_landsat_for_aoi(aoi){
  return ee.ImageCollection("LANDSAT/LT05/C02/T1_L2").filterBounds(aoi)
    // fetch scenes for Jan-May of 2010 and 2011
    .filter(ee.Filter.calendarRange(2010, 2011, 'year')).filter(ee.Filter.calendarRange(1, 5, 'month'))
    // rename the Landsat 5 surface reflectance bands to readable names
    .select(['SR_B1', 'SR_B2', 'SR_B3', 'SR_B4', 'SR_B5', 'ST_B6', 'SR_B7'],
            band_names)
    // keep relatively cloud-free scenes and build a 25th-percentile composite
    .filter(ee.Filter.lte('CLOUD_COVER', 30))
    .reduce(ee.Reducer.percentile([25]));
}
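To show how the helper might be invoked, here is a minimal usage sketch; the sample point, the '_p25' band suffix added by the percentile reducer, and the visualisation stretch are all illustrative assumptions rather than values from the post.
// minimal usage sketch -- 'aoi' is a hypothetical geometry; in the post the
// buffered Landsat footprints serve as the areas of interest
var aoi = ee.Geometry.Point([77.2, 28.6]).buffer(5e4);  // illustrative point near Delhi
var composite = get_landsat_for_aoi(aoi);
// the percentile reducer appends '_p25' to each band name; min/max are placeholder stretch values
Map.addLayer(composite, {bands: ['Red_p25', 'Green_p25', 'Blue_p25'], min: 7000, max: 20000}, 'L5 p25 composite');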
// extract the unique path numbers from the landsat footprint collection
var unique_paths = landsat_footprints.aggregate_array('path').distinct();
print('Unique paths:', unique_paths);
// segregate the landsat footprints by path
var path_wise_footprints = unique_paths.map(function (element){
  return landsat_footprints.filter(ee.Filter.eq('path', element));
});
print('Path wise footprints', path_wise_footprints);
// apply a negative buffer of 80 km (-8e4 m) to shrink each footprint inward
landsat_footprints = landsat_footprints.map(function (x){return x.buffer(-8e4, 5)});
Map.addLayer(landsat_footprints, {}, 'landsat_footprints');
// pull the Landsat footprint collection and filter it to your area of interest
var landsat_footprints = ee.FeatureCollection("users/pratyusht/public_assets/landsat_footprints")
    .filter(ee.Filter.and(
        ee.Filter.lte('path', 148), ee.Filter.gte('path', 140),
        ee.Filter.lte('row', 46), ee.Filter.gte('row', 42)
    ));
Map.addLayer(landsat_footprints, {}, 'landsat_footprints');
Map.centerObject(landsat_footprints, 6);
// call the function to match the histogram of the slave image to the master
var hist = require('users/pratyusht/PublicFunctions:seamless_mosaic/histogram');
var matched_slave = hist.match_histogram(master, slave, overlap, band_names, 30);
Map.addLayer(matched_slave, l8_vis_params, 'Matched Slave');
// Get the footprints of the two landsat scenes
var master_fp = master.geometry();
var slave_fp = slave.geometry();
// Perform intersection to extract the overlap area
var overlap = master_fp.intersection(slave_fp);
// Add all three layers to the map
Map.addLayer(master_fp, {}, 'Master Footprint');
Map.addLayer(slave_fp, {}, 'Slave Footprint');
Map.addLayer(overlap, {}, 'Overlap');
// query landsat 8 collection for given AOI
var band_names = ['Blue', 'Green', 'Red', 'NIR', 'SWIR1', 'Thermal', 'SWIR2'];
var landsat8 = ee.ImageCollection("LANDSAT/LC08/C02/T1_L2")
    .filterBounds(geometry)
    .filterDate('2020-03-15', '2020-03-25')
    .filter(ee.Filter.lte('CLOUD_COVER', 5))
    .select(
        ['SR_B2', 'SR_B3', 'SR_B4', 'SR_B5', 'SR_B6', 'ST_B10', 'SR_B7'],
        band_names
    );
# define the output file names
mx_outfile = mx_file.replace('.tif', '_byte_deflate.tif')
sb_outfile = sb_file.replace('.tif', '_byte_deflate.tif')
# export the files
raster.export(arr_mx, ds_mx, filename=mx_outfile, dtype='uint8', compress='DEFLATE')
raster.export(arr_sb, ds_sb, filename=sb_outfile, dtype='uint8', compress='DEFLATE')