Could anyone suggest how to iteratively fetch the top n rows at a time from a data source?
The requirement is to download XML content from a SharePoint site and convert it into JSON data files. Each iteration should fetch 1000 rows: the first pass writes rows 1–1000 to one JSON file, the next pass fetches the following 1000 rows and writes them to a second JSON file, and so on, until the entire content from the endpoint has been loaded into n files.
Here is the existing PowerShell script for reference; we need to rewrite it in Python.
elseif($fileName -eq "ProjectDataURLs")
{
    # Pages through the SharePoint ProjectData OData feed for each URL listed
    # in $importProjectDataFile and writes each page of results to its own
    # JSON file ($FilePath\file_<url>_<page>_<yyyyMMdd>.json), following the
    # server-driven paging link ('odata.nextLink') until no more pages remain.
    Write-Host "Generating JSON files for Project data PWAEng" -ForegroundColor DarkYellow

    # Load the SharePoint client assemblies and build the credential object
    # once — these are loop-invariant and were needlessly re-run per request.
    Add-Type -Path ([System.Reflection.Assembly]::LoadWithPartialName("Microsoft.SharePoint.Client").Location)
    Add-Type -Path ([System.Reflection.Assembly]::LoadWithPartialName("Microsoft.SharePoint.Client.runtime").Location)
    $spoCred = New-Object Microsoft.SharePoint.Client.SharePointOnlineCredentials($userName, $Password)

    foreach($ProjectData in $importProjectDataFile)
    {
        $fullURL  = $ProjectData.ProjectData
        # NOTE(review): $location is embedded verbatim in the output file name —
        # confirm it never contains path separators or other invalid characters.
        $location = $ProjectData.ProjectData
        $date     = (Get-Date).ToString("yyyyMMdd")
        [int]$count = 0

        do {
            # Use a dedicated variable for the output name; the original
            # re-assigned $fileName, clobbering the value the enclosing
            # elseif dispatched on.
            $outFileName = "file_" + $location + "_" + $count + "_" + $date
            [string]$output = $FilePath + "\" + $outFileName + ".json"

            $url = 'https://myorg.sharepoint.com/sites/pwaeng/_api/projectdata/' + $fullURL
            $request = [System.Net.WebRequest]::Create($url)
            $request.Credentials = $spoCred
            $request.Timeout = 60000
            $request.Accept  = "application/json;charset=utf-8"
            $request.Headers.Add("X-FORMS_BASED_AUTH_ACCEPTED", "f")

            # Dispose the response and reader deterministically so sockets
            # are not leaked across the (potentially long) paging loop.
            $response = $request.GetResponse()
            try {
                $reader = New-Object System.IO.StreamReader $response.GetResponseStream()
                try {
                    $data = $reader.ReadToEnd()
                }
                finally {
                    $reader.Dispose()
                }
            }
            finally {
                $response.Dispose()
            }

            # Persist this page of results.
            $data > $output

            # OData server-driven paging: follow 'odata.nextLink' until absent.
            $getParas     = $data | ConvertFrom-Json
            $getSkipToken = $getParas.'odata.nextLink'

            # nextLink already carries a query string, so append with '&'.
            # The original '&?$top=1000' produced a malformed URL.
            $fullURL = $getSkipToken + '&$top=1000'
            $count   = $count + 1
        }
        # Fixed typo: $nulll was an undefined variable (only accidentally
        # equal to $null). Per PowerShell convention, $null goes on the left
        # so a collection-valued LHS cannot change the comparison semantics.
        while ($null -ne $getSkipToken)
    }
}