无法使用python从谷歌驱动器下载大文件

发布于 2025-01-09 08:40:17 字数 2471 浏览 0 评论 0原文

我想使用 python 从 google 驱动器下载大文件。 我使用下面的代码做到了这一点

import pickle
import os.path
import requests
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
import os
import pickle

class DriveAPI:
   global SCOPES
   SCOPES = ['https://www.googleapis.com/auth/drive.file']
   def __init__(self):
       self.creds = None
       if os.path.exists('token.pickle'):
           with open('token.pickle','rb') as token:
               self.creds = pickle.load(token)
       if not self.creds or not self.creds.valid:
           if self.creds and self.creds.expired and self.creds.refresh_token:
               self.creds.refresh(Request())
           else:
               flow = InstalledAppFlow.from_client_secrets_file('credentials.json', SCOPES)
               self.creds = flow.run_local_server(port=0)
           with open('token.pickle', 'wb') as token:
               pickle.dump(self.creds,token)
       self.service = build('drive','v3',credentials=self.creds)
       results = self.service.files().list(pageSize=100,fields='files(id,name,createdTime)').execute()
       items = results.get('files',[])

   def download_file_from_google_drive(self,id, destination):
      def get_confirm_token(response):
          for key, value in response.cookies.items():
              if key.startswith('download_warning'):
                  return value
       return None

   def save_response_content(self,response, destination):
       CHUNK_SIZE = 32768
       with open(destination, "wb") as f:
           for chunk in response.iter_content(CHUNK_SIZE):
               if chunk: # filter out keep-alive new chunks
                   f.write(chunk)

   URL = "https://docs.google.com/uc?export=download"

   session = requests.Session()

   response = session.get(URL, params = { 'id' : id }, stream = True)
   token = get_confirm_token(response)
   if token:
       params = { 'id' : id, 'confirm' : token }
       response = session.get(URL, params = params, stream = True)
   save_response_content(response, destination)    

if __name__ == "__main__":
   obj = DriveAPI()
   f_id = "File ID"
   file_name = "File Name"
   obj.service.permissions().create(body={"role":"reader", "type":"anyone"}, fileId=f_id).execute()
   obj.FileDownload(f_id,file_name)

通过使用上面的代码,我能够在一段时间内(例如 2 个月)下载 2Gb 大小的文件。但现在我无法下载大文件。 如果我运行此代码,该文件仅下载 2.2kb 的文件。 但终端打印没有问题。

I want to download large size files from google drive using python.
And I did this using below code

import pickle
import os.path
import requests
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
import os
import pickle

class DriveAPI:
   global SCOPES
   SCOPES = ['https://www.googleapis.com/auth/drive.file']
   def __init__(self):
       self.creds = None
       if os.path.exists('token.pickle'):
           with open('token.pickle','rb') as token:
               self.creds = pickle.load(token)
       if not self.creds or not self.creds.valid:
           if self.creds and self.creds.expired and self.creds.refresh_token:
               self.creds.refresh(Request())
           else:
               flow = InstalledAppFlow.from_client_secrets_file('credentials.json', SCOPES)
               self.creds = flow.run_local_server(port=0)
           with open('token.pickle', 'wb') as token:
               pickle.dump(self.creds,token)
       self.service = build('drive','v3',credentials=self.creds)
       results = self.service.files().list(pageSize=100,fields='files(id,name,createdTime)').execute()
       items = results.get('files',[])

   def download_file_from_google_drive(self,id, destination):
      def get_confirm_token(response):
          for key, value in response.cookies.items():
              if key.startswith('download_warning'):
                  return value
       return None

   def save_response_content(self,response, destination):
       CHUNK_SIZE = 32768
       with open(destination, "wb") as f:
           for chunk in response.iter_content(CHUNK_SIZE):
               if chunk: # filter out keep-alive new chunks
                   f.write(chunk)

   URL = "https://docs.google.com/uc?export=download"

   session = requests.Session()

   response = session.get(URL, params = { 'id' : id }, stream = True)
   token = get_confirm_token(response)
   if token:
       params = { 'id' : id, 'confirm' : token }
       response = session.get(URL, params = params, stream = True)
   save_response_content(response, destination)    

if __name__ == "__main__":
   obj = DriveAPI()
   f_id = "File ID"
   file_name = "File Name"
   obj.service.permissions().create(body={"role":"reader", "type":"anyone"}, fileId=f_id).execute()
   obj.FileDownload(f_id,file_name)

By using above code I was able to download 2Gb size file for a certain period of time like 2 months. But now I'm unable to download large size files.
If I run this code the file downloads only 2.2kb file present.
But there is no issues that prints in terminal.

如果你对这篇内容有疑问,欢迎到本站社区发帖提问 参与讨论,获取更多帮助,或者扫码二维码加入 Web 技术交流群。

扫码二维码加入Web技术交流群

发布评论

需要 登录 才能够评论, 你可以免费 注册 一个本站的账号。
列表为空,暂无数据
我们使用 Cookies 和其他技术来定制您的体验包括您的登录状态等。通过阅读我们的 隐私政策 了解更多相关信息。 单击 接受 或继续使用网站,即表示您同意使用 Cookies 和您的相关数据。
原文