Adding a loop to an existing YouTube API Python script



YouTube support confirmed that, unfortunately, because of the way the YouTube Analytics API is set up, you cannot use country and deviceType as dimensions at the same time. The only workaround is to pull a deviceType report and add one country at a time as a filter, which means firing a separate API call for each country; that can be done with a suitable loop that iterates the call over every possible country. I managed to get the code for the API call itself working as a script, but I need help with the loop over all possible countries (either by fetching the full country list through an API call, or simply by referencing a CSV file containing the country list). For what it's worth, it is also not possible to use deviceType as a filter on a country report. Can anyone give me a hand? Please find my Python code below.

https://1drv.ms/u/s!algtm2gifod43mzv1dqarcvsb81o

I was able to answer my own question. Here is the revised code, for anyone interested: https://1drv.ms/u/s!algtm2gifod43m3pltrfsrhofacz (a distilled outline of the loop is shown below, and a sketch that pulls the country list from the API itself, rather than from a CSV file, appears after the full script).
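
In outline, the per-country loop in the revised script boils down to the sketch below (the run_per_country helper name is mine, the channel ID and metrics are just the script's defaults, and the real script writes each country's rows to results.csv instead of printing them):

import csv
from apiclient.errors import HttpError

def run_per_country(youtube_analytics, start_date, end_date,
                    channel_id="UCJ5v_MCY6GNUBTO8-D3XoAg",
                    countries_path="countries.csv"):
  # One Analytics query per country: deviceType stays the dimension,
  # while the country is supplied as a filter.
  with open(countries_path, "rb") as f:
    for row in csv.reader(f):
      country = ''.join(row)
      try:
        response = youtube_analytics.reports().query(
          ids="channel==%s" % channel_id,
          metrics="views,estimatedMinutesWatched",
          dimensions="deviceType",
          filters="country==%s" % country,
          start_date=start_date,
          end_date=end_date,
          sort="-views"
        ).execute()
        print country, response.get("rows", [])
      except HttpError, e:
        print "Skipping %s: HTTP error %d" % (country, e.resp.status)

The full revised script follows: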

#!/usr/bin/python
from datetime import datetime, timedelta
import httplib2
import os
import sys
import csv
from apiclient.discovery import build
from apiclient.errors import HttpError
from oauth2client.client import flow_from_clientsecrets
from oauth2client.file import Storage
import argparse
from oauth2client.tools import argparser, run_flow

# The CLIENT_SECRETS_FILE variable specifies the name of a file that contains
# the OAuth 2.0 information for this application, including its client_id and
# client_secret. You can acquire an OAuth 2.0 client ID and client secret from
# the Google Developers Console at
# https://console.developers.google.com/.
# Please ensure that you have enabled the YouTube Data and YouTube Analytics
# APIs for your project.
# For more information about using OAuth2 to access the YouTube Data API, see:
#   https://developers.google.com/youtube/v3/guides/authentication
# For more information about the client_secrets.json file format, see:
#   https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
CLIENT_SECRETS_FILE = "client_secretXYZ"
# These OAuth 2.0 access scopes allow for read-only access to the authenticated
# user's account for both YouTube Data API resources and YouTube Analytics Data.
YOUTUBE_SCOPES = ["https://www.googleapis.com/auth/youtube.readonly",
  "https://www.googleapis.com/auth/yt-analytics.readonly"]
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"
YOUTUBE_ANALYTICS_API_SERVICE_NAME = "youtubeAnalytics"
YOUTUBE_ANALYTICS_API_VERSION = "v1"
# This variable defines a message to display if the CLIENT_SECRETS_FILE is
# missing.
MISSING_CLIENT_SECRETS_MESSAGE = """
WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file
found at:
   %s
with information from the Developers Console
https://console.developers.google.com/
For more information about the client_secrets.json file format, please visit:
https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
""" % os.path.abspath(os.path.join(os.path.dirname(__file__),
                                   CLIENT_SECRETS_FILE))

def get_authenticated_services(args):
  flow = flow_from_clientsecrets(CLIENT_SECRETS_FILE,
    scope=" ".join(YOUTUBE_SCOPES),
    message=MISSING_CLIENT_SECRETS_MESSAGE)
  storage = Storage("%s-oauth2.json" % sys.argv[0])
  credentials = storage.get()
  if credentials is None or credentials.invalid:
    credentials = run_flow(flow, storage, args)
  http = credentials.authorize(httplib2.Http())
  youtube_analytics = build(YOUTUBE_ANALYTICS_API_SERVICE_NAME,
    YOUTUBE_ANALYTICS_API_VERSION, http=http)
  return youtube_analytics
def run_analytics_report(youtube_analytics, options, count):
  # Call the Analytics API to retrieve a report. For a list of available
  # reports, see:
  # https://developers.google.com/youtube/analytics/v1/channel_reports
  analytics_query_response = youtube_analytics.reports().query(
    ids="channel==%s" % options.channel_id,
    metrics=options.metrics,
    dimensions=options.dimensions,
    filters=options.filters,
    start_date=options.start_date,
    end_date=options.end_date,
    #max_results=options.max_results,
    sort=options.sort
  ).execute()
  print "Analytics Data for Channel %s" % options.channel_id
  if count == 0:
    # First country: create results.csv and write the header row.
    with open('results.csv', 'w') as csv_out:
      csvWriter = csv.writer(csv_out, delimiter=',', lineterminator='\n')
      headers = [ch["name"] for ch in analytics_query_response.get("columnHeaders", [])]
      headers.append("country")
      csvWriter.writerow(headers)
  # Append this country's rows.
  with open('results.csv', 'a') as csv_out:
    csvWriter = csv.writer(csv_out, delimiter=',', lineterminator='\n')
    for row in analytics_query_response.get("rows", []):
      values = [str(value) for value in row]
      # The filter string is "country==XX"; characters 9-10 are the country code.
      values.append(options.filters[9:11])
      csvWriter.writerow(values)
  print "Results exported to csv"
'''
  for column_header in analytics_query_response.get("columnHeaders", []):
     print "%-20s" % column_header["name"],
  print
  for row in analytics_query_response.get("rows", []):
     for value in row:
       print "%-20s" % value,
     print
'''
if __name__ == "__main__":
  count = 0
  now = datetime.now()
  one_day_ago = (now - timedelta(days=1)).strftime("%Y-%m-%d")
  one_week_ago = (now - timedelta(days=7)).strftime("%Y-%m-%d")
  # countries.csv should contain one ISO 3166-1 alpha-2 country code per row
  # (e.g. "US", "GB", "DE").
  f = open('countries.csv', 'rb')
  reader = csv.reader(f)
  for row in reader:
    argparser = argparse.ArgumentParser()
    argparser.add_argument("--channel-id", help="Channel ID",
      default="UCJ5v_MCY6GNUBTO8-D3XoAg")
    argparser.add_argument("--metrics", help="Report metrics",
      default="views,estimatedMinutesWatched")
    argparser.add_argument("--dimensions", help="Report dimensions",
      default="deviceType")
    argparser.add_argument("--filters", help="Report filters",
      default="country==" + ''.join(row))
    argparser.add_argument("--start-date", default=one_week_ago,
      help="Start date, in YYYY-MM-DD format")
    argparser.add_argument("--end-date", default=one_day_ago,
      help="End date, in YYYY-MM-DD format")
    #argparser.add_argument("--max-results", help="Max results", default=10)
    argparser.add_argument("--sort", help="Sort order", default="-views")
    args = argparser.parse_args()
    # Storage caches the credentials in the *-oauth2.json file, so this does
    # not re-prompt for authorization on each pass through the loop.
    youtube_analytics = get_authenticated_services(args)
    try:
      run_analytics_report(youtube_analytics, args, count)
      count = count + 1
    except HttpError, e:
      print "An HTTP error %d occurred:n%s" % (e.resp.status, e.content)
