I get the IDs of my messages between 2 dates, but it is impossible to delete these messages by their ID in my program, because I always end up in my exception code "erreur suppression".
I have tested in the "Try this API" panel with the same settings and it works: https://developers.google.com/gmail/api/v1/reference/users/messages/delete?apix_params=%7B%22userId%22%3A%22me%22%2C%22id%22%3A%2216be66c1f679ceee%22%7D#auth
The type of content is a string, just like the type of the id parameter in the API.
I use this method: https://developers.google.com/gmail/api/quickstart/python
And here is the relevant code:
# -*- coding: utf-8 -*-
from __future__ import print_function
from datetime import date, timedelta, datetime
import pickle
import os.path
import time
import datetime
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from apiclient import errors

# If modifying these scopes, delete the file token.pickle.
SCOPES = ['https://www.googleapis.com/auth/gmail.readonly']


def main():
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file('credentials.json', SCOPES)
            creds = flow.run_local_server()
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('gmail', 'v1', credentials=creds)

    debut = input("Entrez la date de début de suppression des messages au format JJ/MM/AAAA : ")
    fin = input("Entrez la date de fin de suppression des messages au format JJ/MM/AAAA : ")
    ts_debut = time.mktime(datetime.datetime.strptime(debut, "%d/%m/%Y").timetuple())
    ts_fin = time.mktime(datetime.datetime.strptime(fin, "%d/%m/%Y").timetuple())
    debut = int(ts_debut)
    fin = int(ts_fin)
    query = 'after:' + str(debut) + ' AND before:' + str(fin)

    results = service.users().messages().list(userId='me', labelIds=['INBOX'], q=query).execute()
    # Call the Gmail API
    messages = results.get('messages', [])
    user_id = 'user email address'
    if not messages:
        print('No labels found.')
    # Dates have to be formatted in YYYY/MM/DD format for Gmail
    else:
        print('Labels:')
        for msg_ref in messages:
            content = msg_ref['id']
            try:
                message = service.users().messages().get(userId='me', id=content, format='metadata').execute()
                try:
                    service.users().messages().delete(userId=user_id, id=content).execute()
                    print("")
                except errors.HttpError:
                    print("erreur")
                    print(content)
            except:
                print('erreur date')


main()
I have found the answer: my scope was wrong. I changed gmail.readonly to https://mail.google.com/ for full access to the account, including permanent deletion of threads and messages.
Go to this link for more information: https://developers.google.com/gmail/api/auth/scopes
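For reference, a minimal sketch of the two lines that change; it assumes the rest of the quickstart code above stays the same and that token.pickle is deleted so the broader scope gets re-authorized:

# Full access to the account, including permanent deletion of threads and messages.
SCOPES = ['https://mail.google.com/']

# After deleting token.pickle and re-authorizing, the delete call from the loop above succeeds:
service.users().messages().delete(userId='me', id=content).execute()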
I have been working on this for the last 3 days and I don't have any idea; there are similar questions but I didn't find my answer.
This is the error. The file is created, and I want to use this ID to edit the Docs file in Drive:
File ID: 1U4XUrAhMk1WFAKE_IDqmQcteYqmIWPMFEd
Traceback (most recent call last):
  File "main.py", line 61, in <module>
    main()
  File "main.py", line 53, in main
    service.documents()
AttributeError: 'Resource' object has no attribute 'documents'
My goal:
Create a Docs file in Google Drive
Insert a table in it
from __future__ import print_function
import pickle
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
import sys
from gdoctableapppy import gdoctableapp

# If modifying these scopes, delete the file token.pickle.
SCOPES = ["https://www.googleapis.com/auth/drive"]


def main():
    """Shows basic usage of the Drive v3 API.
    Prints the names and ids of the first 10 files the user has access to.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists("token.pickle"):
        with open("token.pickle", "rb") as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file("credentials.json", SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open("token.pickle", "wb") as token:
            pickle.dump(creds, token)

    service = build("drive", "v3", credentials=creds)
    serviceDoc = build("docs", "v1", credentials=creds)

    # Call the Drive v3 API
    # Create Google Docs file in folder
    file_metadata = {
        "name": sys.argv[1],
        "parents": ["Folder ID"],
    }
    file = service.files().create(body=file_metadata, fields="id").execute()
    print("File ID: %s" % file.get("id"))

    DOCUMENT_ID = file.get("id")
    requests = [{"insertTable": {"rows": 2, "columns": 2, "location": {"index": 1}}}]
    result = (
        service.documents()
        .batchUpdate(documentId=DOCUMENT_ID, body={"requests": requests})
        .execute()
    )
    return


if __name__ == "__main__":
    main()
The reason you are encountering this error is that your service variable is for the Drive API; it doesn't have a documents() method.
Use serviceDoc instead:
result = (
    serviceDoc.documents()
    .batchUpdate(documentId=DOCUMENT_ID, body={"requests": requests})
    .execute()
)
In addition:
I noticed that when you create the Docs file, mimeType is not part of your file_metadata. If you create a file without a specific mimeType, your newly created file will be application/octet-stream. See Create Files.
If you want to create a Google Docs file using the Drive API, please add "mimeType": "application/vnd.google-apps.document" to your file_metadata.
Sample:
file_metadata = {
    "name": sys.argv[1],
    "mimeType": "application/vnd.google-apps.document",
    "parents": ["Folder ID"]
}
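Putting the two points together, a minimal sketch of the create-then-edit flow, reusing the service, serviceDoc, and placeholder folder ID from the question:

# Create a real Google Docs file (note the explicit mimeType) with the Drive service...
file_metadata = {
    "name": sys.argv[1],
    "mimeType": "application/vnd.google-apps.document",
    "parents": ["Folder ID"],
}
file = service.files().create(body=file_metadata, fields="id").execute()
DOCUMENT_ID = file.get("id")

# ...then edit it with the Docs service, not the Drive one.
requests = [{"insertTable": {"rows": 2, "columns": 2, "location": {"index": 1}}}]
serviceDoc.documents().batchUpdate(
    documentId=DOCUMENT_ID, body={"requests": requests}
).execute()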
Reference:
Google Workspace and Drive MIME Types
So I have the code below, which counts the number of times emails have been sent to each address (from a list of emails saved in a Google Sheet) and keeps adding to that count across the sessions the code runs. The values are stored in a JSON file.
Is there a way I can push these values from the JSON file to a Google Sheet directly from Python? I want to do this without using a Pandas dataframe or the IMPORTJSON function in Google Sheets.
As a newbie to programming and Python, I have tried researching this but have come to a dead end. Any help is appreciated.
import gspread
from oauth2client.service_account import ServiceAccountCredentials

# login & open sheets
scope = ["https://spreadsheets.google.com/feeds", 'https://www.googleapis.com/auth/spreadsheets',
         "https://www.googleapis.com/auth/drive.file", "https://www.googleapis.com/auth/drive"]
credentials = ServiceAccountCredentials.from_json_keyfile_name('my-jsonfile.json', scope)
client = gspread.authorize(credentials)
sheet3 = client.open('Dashboard').worksheet('Sheet3')  # Open the spreadsheet
sheet1 = client.open('Dashboard').worksheet('Sheet1')  # Open the spreadsheet
sheet4 = client.open('Dashboard').worksheet('Sheet4')  # Open the spreadsheet

### Countup ###
import smtplib
import ssl
from email.mime.text import MIMEText  # New line
from email.utils import formataddr  # New line

# User configuration
sender_email = 'email'
sender_name = 'username'
password = "password"

x = sheet3.row_values(4)
c = len(sheet1.col_values(8))
cell = ("")

from collections import Counter
import json

counter_file_path = "counter.json"
try:
    with open(counter_file_path, "r") as f:
        email_stats = json.load(f)
except FileNotFoundError as ex:
    email_stats = {}

successful_emails = []
for x in range(4, c):
    names = sheet3.cell(x + 1, 6).value
    emails = sheet3.cell(x + 1, 8).value
    if names == cell and emails == cell:
        print("no_data")
    else:
        receiver_names = list(names.split())
        receiver_emails = list(emails.split())
        # Email text
        email_body = '''
This is a test email sent by Python. Isn't that cool?
'''
        for receiver_email, receiver_name in zip(receiver_emails, receiver_names):
            print("Sending the email...")
            # Configuring user's info
            msg = MIMEText(email_body, 'plain')
            msg['To'] = formataddr((receiver_name, receiver_email))
            msg['From'] = formataddr((sender_name, sender_email))
            msg['Subject'] = 'Hello, my friend ' + receiver_name
            try:
                # Creating an SMTP session | use 587 with TLS, 465 SSL and 25
                server = smtplib.SMTP('smtp.gmail.com', 587)
                server.ehlo()
                # Encrypts the email
                context = ssl.create_default_context()
                server.starttls(context=context)
                # We log in into our Google account
                server.login(sender_email, password)
                # Sending email from sender, to receiver with the email body
                server.sendmail(sender_email, receiver_email, msg.as_string())
                print('Email sent!')
                successful_emails.append(receiver_email)
                if receiver_email in email_stats:
                    email_stats[receiver_email] += 1
                else:
                    email_stats[receiver_email] = 1
            except Exception as e:
                print(f'Oh no! Something bad happened!\n {e}')
            finally:
                print('Closing the server...')
                server.quit()

# counter = Counter(successful_emails)
# print(counter)
print(email_stats)  # output - all occurrences for each email

with open(counter_file_path, "w") as f:
    results = json.dump(email_stats, f)
The resulting JSON file contains: {"receiver_emails_1": 6, "receiver_emails_2": 6}
Try this.
Please refer to this documentation (updating a range of cells using the top-left corner address):
https://gspread.readthedocs.io/en/latest/#example-usage
import json
import gspread
from google.oauth2.service_account import Credentials

# connect to your google sheet
scope = ['https://spreadsheets.google.com/feeds',
         'https://www.googleapis.com/auth/drive']
credentials = Credentials.from_service_account_file('key.json', scopes=scope)
gc = gspread.authorize(credentials)
wks = gc.open("your spreadsheet name").sheet1

# Let's say you have some json values
x = '{ "receiver_email_1":6, "receiver_email_2":8, "receiver_email_3":10 }'
y = json.loads(x)

result = []
for key in y:
    result.append([key, y[key]])

wks.update('A1', result)
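If you would rather add the counts without overwriting a fixed range, gspread also has append_row; a minimal sketch, assuming the same wks handle as above and the counter.json produced by your script:

import json

# Load the counters written by the email script and append one row per address.
with open("counter.json") as f:
    email_stats = json.load(f)

for email, count in email_stats.items():
    wks.append_row([email, count])  # appends [address, count] after the last row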
My Google Drive is linked to my Google Colab notebook. Using the PyTorch library, torch.load($PATH) fails to load this 219 MB file (a pre-trained neural network, https://drive.google.com/drive/folders/1-9m4aVg8Hze0IsZRyxvm5gLybuRLJHv-) which is in my Google Drive. However, it works fine when I do it locally on my computer. The error I get on Google Colab is (settings: Python 3.6, PyTorch 1.3.1):
state_dict = torch.load(model_path)['state_dict']
  File "/usr/local/lib/python3.6/dist-packages/torch/serialization.py", line 303, in load
    return _load(f, map_location, pickle_module)
  File "/usr/local/lib/python3.6/dist-packages/torch/serialization.py", line 454, in _load
    return legacy_load(f)
  File "/usr/local/lib/python3.6/dist-packages/torch/serialization.py", line 380, in legacy_load
    with closing(tarfile.open(fileobj=f, mode='r:', format=tarfile.PAX_FORMAT)) as tar,
  File "/usr/lib/python3.6/tarfile.py", line 1589, in open
    return func(name, filemode, fileobj, **kwargs)
  File "/usr/lib/python3.6/tarfile.py", line 1619, in taropen
    return cls(name, mode, fileobj, **kwargs)
  File "/usr/lib/python3.6/tarfile.py", line 1482, in __init__
    self.firstmember = self.next()
  File "/usr/lib/python3.6/tarfile.py", line 2297, in next
    tarinfo = self.tarinfo.fromtarfile(self)
  File "/usr/lib/python3.6/tarfile.py", line 1092, in fromtarfile
    buf = tarfile.fileobj.read(BLOCKSIZE)
OSError: [Errno 5] Input/output error
Any help would be much appreciated!
Large files on Drive are automatically scanned for viruses; every time you attempt to download a large file you have to pass through this scan, which makes it hard to reach the download link.
You could download the file directly using the Drive API and then pass it to torch; it shouldn't be hard to implement in Python. I've made a sample of how to download your file and pass it to torch.
from __future__ import print_function

import torch
import pickle
import os.path
import io
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from googleapiclient.http import MediaIoBaseDownload

url = "https://drive.google.com/file/d/1RwpuwNPt_r0M5mQGEw18w-bCfKVwnZrs/view?usp=sharing"

# If modifying these scopes, delete the file token.pickle.
SCOPES = (
    'https://www.googleapis.com/auth/drive',
)


def main():
    """Downloads the checkpoint from Drive and loads it with torch."""
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    drive_service = build('drive', 'v2', credentials=creds)

    file_id = '1RwpuwNPt_r0M5mQGEw18w-bCfKVwnZrs'
    request = drive_service.files().get_media(fileId=file_id)
    # fh = io.BytesIO()
    fh = open('file', 'wb')
    downloader = MediaIoBaseDownload(fh, request)
    done = False
    while done is False:
        status, done = downloader.next_chunk()
        print("Download %d%%." % int(status.progress() * 100))
    fh.close()
    torch.load('file')


if __name__ == '__main__':
    main()
To run it you'll first have to:
Enable the Drive API for your account
Install the Google Drive API libraries
This takes no more than 3 minutes and is properly explained in the Quickstart Guide for Google Drive API; just follow steps 1 and 2 and run the sample code provided above.
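If you prefer not to write a temporary file, torch.load also accepts a file-like object, so the download can stay in memory; a minimal sketch, reusing the request object built in the sample above:

import io
import torch
from googleapiclient.http import MediaIoBaseDownload

# Download into an in-memory buffer instead of a file on disk.
fh = io.BytesIO()
downloader = MediaIoBaseDownload(fh, request)
done = False
while not done:
    status, done = downloader.next_chunk()
    print("Download %d%%." % int(status.progress() * 100))

# Rewind the buffer and hand it straight to torch.
fh.seek(0)
state_dict = torch.load(fh)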
It worked by uploading the file directly to Google Colab instead of loading it from Google Drive, using:
from google.colab import files
uploaded= files.upload()
I guess this solution is similar to the one proposed by #Yuri
I am trying to create a Google Docs file using the API with Python.
I have followed every instruction on their API Guides and Reference page, including creating their quickstart script:
from __future__ import print_function
import pickle
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request

# If modifying these scopes, delete the file token.pickle.
SCOPES = ['https://www.googleapis.com/auth/drive.metadata.readonly']


def main():
    SCOPES = ['https://www.googleapis.com/auth/drive.file']
    """Shows basic usage of the Drive v3 API.
    Prints the names and ids of the first 10 files the user has access to.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('drive', 'v3', credentials=creds)

    # Call the Drive v3 API
    results = service.files().list(
        pageSize=10, fields="nextPageToken, files(id, name)").execute()
    items = results.get('files', [])

    if not items:
        print('No files found.')
    else:
        print('Files:')
        for item in items:
            print(u'{0} ({1})'.format(item['name'], item['id']))

    title = 'My Document'
    body = {
        'title': title
    }
    doc = service.files() \
        .create(body=body).execute()
    print('Created document with title: {0}'.format(
        doc.get('title')))


if __name__ == '__main__':
    main()
I expected a Google Docs file to be created, but instead the script returned: Created document with title: None.
It returns no errors, but clearly something is missing, because the file is not created.
I am quite frustrated because I spent 9 hours trying to get Google Drive's own script to work. The code is a direct copy-paste from the Google Drive and Docs API documentation, except that I changed the scope from SCOPES = ['https://www.googleapis.com/auth/drive.metadata.readonly'] to SCOPES = ['https://www.googleapis.com/auth/drive.file'], because with the former it was crashing and the API documentation advises using the latter scope when creating files.
Edit:
Current script:
from __future__ import print_function
import pickle
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request

# If modifying these scopes, delete the file token.pickle.
SCOPES = ['https://www.googleapis.com/auth/documents']


def main():
    """Shows basic usage of the Docs API.
    Prints the title of a sample document.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('docs', 'v1', credentials=creds)

    title = 'My Document'
    body = {
        'title': title
    }
    doc = service.documents() \
        .create(body=body).execute()
    print('Created document with title: {0}'.format(
        doc.get('title')))
    return


if __name__ == '__main__':
    main()
I get the following error:
Traceback (most recent call last):
  File "create-teamwork-sops.py", line 137, in <module>
    main()
  File "create-teamwork-sops.py", line 131, in main
    .create(body=body).execute()
  File "C:\Python27\lib\site-packages\googleapiclient\_helpers.py", line 130, in positional_wrapper
    return wrapped(*args, **kwargs)
  File "C:\Python27\lib\site-packages\googleapiclient\http.py", line 855, in execute
    raise HttpError(resp, content, uri=self.uri)
googleapiclient.errors.HttpError: <HttpError 403 when requesting https://docs.googleapis.com/v1/documents?alt=json returned "Request had insufficient authentication scopes.">
Note: every time the value of SCOPES is changed, the file token.pickle needs to be deleted. When the script runs, it will ask you to log into Google Drive again and will create a new token.pickle file, which allows the new scope to be taken into account.
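A minimal sketch of that cleanup step, assuming token.pickle sits next to the script:

import os

# After changing SCOPES, remove the cached credentials so the next run
# triggers a fresh authorization with the new scope.
if os.path.exists('token.pickle'):
    os.remove('token.pickle')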
Working script:
from __future__ import print_function
import pickle
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request

SCOPES = ['https://www.googleapis.com/auth/drive']


def main():
    """Shows basic usage of the Docs API.
    Prints the title of a sample document.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    # service = build('docs', 'v1', credentials=creds)
    service = build('drive', 'v3', credentials=creds)

    # title = 'My Document'
    # body = {
    #     'title': title
    # }
    # doc = service.documents() \
    #     .create(body=body).execute()
    # print('Created document with title: {0}'.format(
    #     doc.get('title')))

    # get folder ID
    page_token = None
    while True:
        response = service.files().list(q="mimeType = 'application/vnd.google-apps.folder'",
                                        spaces='drive',
                                        fields='nextPageToken, files(id, name)',
                                        pageToken=page_token).execute()
        for file in response.get('files', []):
            # Process change
            print('Found file: %s (%s)' % (file.get('name'), file.get('id')))
            if file.get('name') == "SOPs":
                folder_id = file.get('id')
                break
        page_token = response.get('nextPageToken', None)
        if page_token is None:
            break

    # create Google Docs file in folder
    file_metadata = {
        'name': 'my doc 2',
        'parents': [folder_id]
    }
    # media = MediaFileUpload('files/photo.jpg',
    #                         mimetype='image/jpeg',
    #                         resumable=True)
    file = service.files().create(body=file_metadata,
                                  # media_body=media,
                                  fields='id').execute()
    print('File ID: %s' % file.get('id'))
    return


if __name__ == '__main__':
    main()
You want to create a new Google Document using the Docs API.
You want to put the newly created Google Document into a specific folder.
You want to achieve this using google-api-python-client with Python.
I understood your question like this. If my understanding is correct, unfortunately, when a new Google Document is created by the Docs API, the Document is placed in the root folder. So when you want to create the new Document directly in a specific folder, please use the Drive API. The modified script is as follows.
From:
body = {
    'title': title
}
To:
body = {
    'name': title,
    'mimeType': 'application/vnd.google-apps.document',
    'parents': ['### folder ID ###']
}
Please set the folder ID in 'parents': ['### folder ID ###'].
Note:
Of course, after the new Document has been created in the root folder by the Docs API, the file can be moved to a specific folder using the Drive API. In that case, 2 API calls are used, so I proposed the modification above.
If you want to create the new Google Document using the Docs API, please modify as follows. This modified script supposes that you have already been able to set and get values for a Google Document using the Google Docs API.
From:
doc = service.files() \
.create(body=body).execute()
To:
serviceForDocs = build('docs', 'v1', credentials=creds)
doc = serviceForDocs.documents().create(body=body).execute()
References:
Method: documents.create of Docs API
Files: create of Drive API
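For completeness, a minimal sketch of the two-call alternative mentioned in the note (create with the Docs API, then move with the Drive API); the folder ID is a placeholder and creds is the credentials object from the script above:

# 1) Create the document (it lands in the root folder) with the Docs API.
serviceForDocs = build('docs', 'v1', credentials=creds)
doc = serviceForDocs.documents().create(body={'title': 'My Document'}).execute()
doc_id = doc.get('documentId')

# 2) Move it into the target folder with the Drive API.
drive_service = build('drive', 'v3', credentials=creds)
previous_parents = ",".join(
    drive_service.files().get(fileId=doc_id, fields='parents').execute().get('parents', []))
drive_service.files().update(fileId=doc_id,
                             addParents='### folder ID ###',
                             removeParents=previous_parents,
                             fields='id, parents').execute()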
I'm programming a script to create multiple Google Docs files.
I have followed the Google Docs API Guides:
Quickstart (https://developers.google.com/docs/api/quickstart/python)
and Creating and managing documents (https://developers.google.com/docs/api/how-tos/documents)
from __future__ import print_function
import pickle
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request


def main():
    """Shows basic usage of the Drive v3 API.
    Prints the names and ids of the first 10 files the user has access to.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('drive', 'v3', credentials=creds)

    # Call the Drive v3 API
    results = service.files().list(
        pageSize=10, fields="nextPageToken, files(id, name)").execute()
    items = results.get('files', [])

    if not items:
        print('No files found.')
    else:
        print('Files:')
        for item in items:
            print(u'{0} ({1})'.format(item['name'], item['id']))

    print("Creating blank Google Docs file")
    title = 'My Document'
    body = {
        'title': title
    }
    doc = service.documents() \
        .create(body=body).execute()
    print('Created document with title: {0}'.format(
        doc.get('title')))
    return


if __name__ == '__main__':
    main()
Note:
The block starting with print("Creating blank Google Docs file") and the final return are indented once, inside main(); they are not part of the else: print('Files:') branch.
Instead of creating a Google Docs file I get the following error:
Traceback (most recent call last):
  File "create-teamwork-sops.py", line 234, in <module>
    main()
  File "create-teamwork-sops.py", line 227, in main
    doc = service.documents() \
AttributeError: 'Resource' object has no attribute 'documents'