2020-06-25 03:41:04 +00:00
|
|
|
# Copyright 2020 Google Inc.
|
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
#
|
|
|
|
################################################################################
|
|
|
|
"""Unit tests for Cloud Function sync, which syncs the list of github projects
|
|
|
|
and uploads them to the Cloud Datastore."""
|
|
|
|
|
2020-07-06 07:52:24 +00:00
|
|
|
from collections import namedtuple
|
2020-06-25 03:41:04 +00:00
|
|
|
import os
|
|
|
|
import subprocess
|
|
|
|
import threading
|
2020-07-08 01:41:54 +00:00
|
|
|
import unittest
|
|
|
|
|
|
|
|
import requests
|
2020-06-25 03:41:04 +00:00
|
|
|
|
|
|
|
from google.cloud import ndb
|
|
|
|
|
2020-07-01 06:13:08 +00:00
|
|
|
from main import get_access_token
|
2020-07-08 01:41:54 +00:00
|
|
|
from main import get_projects
|
|
|
|
from main import sync_projects
|
2020-07-01 06:13:08 +00:00
|
|
|
from main import Project
|
2020-06-25 03:41:04 +00:00
|
|
|
|
|
|
|
# Seconds to wait for an emulator to report readiness before giving up.
_EMULATOR_TIMEOUT = 20
# Substring of the emulator's output that signals it is ready to serve.
_DATASTORE_READY_INDICATOR = b'is now running'
# Local port the Datastore emulator is started on.
_DATASTORE_EMULATOR_PORT = 8432
# Project id shared by the emulator and the ndb client in these tests.
_TEST_PROJECT_ID = 'test-project'

# Lightweight stand-in for project metadata; only the schedule is used here.
ProjectMetadata = namedtuple('ProjectMetadata', 'schedule')
|
2020-06-25 03:41:04 +00:00
|
|
|
|
|
|
|
|
|
|
|
def start_datastore_emulator():
  """Spawn the gcloud Datastore emulator as a subprocess.

  Returns:
    The subprocess.Popen handle. stderr is merged into the stdout pipe so
    callers can scan a single stream for the readiness indicator.
  """
  command = [
      'gcloud',
      'beta',
      'emulators',
      'datastore',
      'start',
      '--consistency=1.0',
      '--host-port=localhost:' + str(_DATASTORE_EMULATOR_PORT),
      '--project=' + _TEST_PROJECT_ID,
      '--no-store-on-disk',
  ]
  return subprocess.Popen(command,
                          stdout=subprocess.PIPE,
                          stderr=subprocess.STDOUT)
|
|
|
|
|
|
|
|
|
|
|
|
def _wait_for_emulator_ready(proc,
                             emulator,
                             indicator,
                             timeout=_EMULATOR_TIMEOUT):
  """Block until `indicator` appears on the emulator process's stdout.

  Args:
    proc: subprocess.Popen handle whose stdout is scanned.
    emulator: Human-readable emulator name, used in the error message.
    indicator: Byte string whose presence in an output line means "ready".
    timeout: Seconds to wait for readiness before raising.

  Returns:
    The daemon thread that keeps draining the process output.

  Raises:
    RuntimeError: If the indicator is not seen within `timeout` seconds.
  """

  def _drain_output(process, ready_event):
    """Continuously read stdout, signalling on the first indicator match."""
    seen = False
    # readline() returns b'' at EOF, which terminates the iterator.
    for output_line in iter(process.stdout.readline, b''):
      if not seen and indicator in output_line:
        seen = True
        ready_event.set()

  ready_event = threading.Event()
  watcher = threading.Thread(target=_drain_output, args=(proc, ready_event))
  watcher.daemon = True
  watcher.start()
  if ready_event.wait(timeout):
    return watcher
  raise RuntimeError(
      '{} emulator did not get ready in time.'.format(emulator))
|
|
|
|
|
|
|
|
|
|
|
|
# pylint: disable=too-few-public-methods
class Repository:
  """Mock of a GitHub repository / content-file node.

  Mimics the subset of the repository API these tests exercise: each node
  carries a name, a type ('dir' or 'file'), a path, optional child nodes,
  and raw yaml bytes exposed as `decoded_content`.
  """

  def __init__(self, name, file_type, path, contents=None):
    self.name = name
    self.type = file_type
    self.path = path
    self.contents = contents if contents else []
    self.decoded_content = b"name: test"

  def get_contents(self, path):
    """Return the contents stored at `path`, or None when not found."""
    if path == self.path:
      return self.contents
    # Otherwise look one level down among the direct children.
    return next(
        (child.contents for child in self.contents if child.path == path),
        None)

  def set_yaml_contents(self, decoded_content):
    """Replace the raw yaml bytes served as `decoded_content`."""
    self.decoded_content = decoded_content
|
|
|
|
|
|
|
|
|
|
|
|
class CloudSchedulerClient:
  """Mock of the Cloud Scheduler client used by sync_projects.

  Jobs are kept as plain dicts in `self.schedulers` instead of being sent
  to the real Cloud Scheduler API.
  """

  def __init__(self):
    # List of job dicts; each job has at least a 'name' key.
    self.schedulers = []

  # pylint: disable=no-self-use
  def location_path(self, project_id, location_id):
    """Return the resource path for a project location."""
    return 'projects/{}/location/{}'.format(project_id, location_id)

  def create_job(self, parent, job):
    """Simulate job creation, skipping jobs whose name already exists."""
    del parent
    # Bug fix: the previous check compared the name *string* against the
    # list of job *dicts*, so it never matched and duplicates were always
    # appended. Compare against the stored job names instead.
    if job['name'] not in [existing['name'] for existing in self.schedulers]:
      self.schedulers.append(job)

  # pylint: disable=no-self-use
  def job_path(self, project_id, location_id, name):
    """Return the resource path for a scheduler job."""
    return 'projects/{}/location/{}/jobs/{}'.format(project_id, location_id,
                                                    name)

  def delete_job(self, name):
    """Simulate deletion of the job with the given name."""
    for job in self.schedulers:
      if job['name'] == name:
        self.schedulers.remove(job)
        break

  def update(self, job, update_mask):
    """Simulate updating a job's schedule from the update mask."""
    # NOTE(review): matches by full-dict equality and mutates the caller's
    # `job` rather than the stored entry; presumably mirrors how
    # main.sync_projects invokes it — confirm before changing.
    for existing_job in self.schedulers:
      if existing_job == job:
        job['schedule'] = update_mask['schedule']
|
|
|
|
|
2020-06-25 03:41:04 +00:00
|
|
|
|
|
|
|
class TestDataSync(unittest.TestCase):
  """Unit tests for sync."""

  @classmethod
  def setUpClass(cls):
    """Start one Datastore emulator for the class and point clients at it."""
    ds_emulator = start_datastore_emulator()
    _wait_for_emulator_ready(ds_emulator, 'datastore',
                             _DATASTORE_READY_INDICATOR)
    # Route the ndb/datastore client libraries to the local emulator.
    os.environ['DATASTORE_EMULATOR_HOST'] = 'localhost:' + str(
        _DATASTORE_EMULATOR_PORT)
    os.environ['GOOGLE_CLOUD_PROJECT'] = _TEST_PROJECT_ID
    os.environ['DATASTORE_DATASET'] = _TEST_PROJECT_ID
    # Presumably read by main when building scheduler job paths — confirm.
    os.environ['GCP_PROJECT'] = 'test-project'
    os.environ['FUNCTION_REGION'] = 'us-central1'

  def setUp(self):
    """Reset the emulator's stored state before each test."""
    req = requests.post(
        'http://localhost:{}/reset'.format(_DATASTORE_EMULATOR_PORT))
    req.raise_for_status()

  def test_sync_projects_update(self):
    """Testing sync_projects() updating a schedule."""
    client = ndb.Client()
    cloud_scheduler_client = CloudSchedulerClient()

    with client.context():
      Project(name='test1', schedule='0 8 * * *').put()
      Project(name='test2', schedule='0 9 * * *').put()

      # test2's schedule changes from '0 9 * * *' to '0 7 * * *'.
      projects = {
          'test1': ProjectMetadata('0 8 * * *'),
          'test2': ProjectMetadata('0 7 * * *')
      }
      sync_projects(cloud_scheduler_client, projects)

      projects_query = Project.query()
      self.assertEqual({
          'test1': '0 8 * * *',
          'test2': '0 7 * * *'
      }, {project.name: project.schedule for project in projects_query})

  def test_sync_projects_create(self):
    """Testing sync_projects() creating new schedule."""
    client = ndb.Client()
    cloud_scheduler_client = CloudSchedulerClient()

    with client.context():
      Project(name='test1', schedule='0 8 * * *').put()

      # test2 does not exist yet, so sync should create it.
      projects = {
          'test1': ProjectMetadata('0 8 * * *'),
          'test2': ProjectMetadata('0 7 * * *')
      }
      sync_projects(cloud_scheduler_client, projects)

      projects_query = Project.query()
      self.assertEqual({
          'test1': '0 8 * * *',
          'test2': '0 7 * * *'
      }, {project.name: project.schedule for project in projects_query})

  def test_sync_projects_delete(self):
    """Testing sync_projects() deleting."""
    client = ndb.Client()
    cloud_scheduler_client = CloudSchedulerClient()

    with client.context():
      Project(name='test1', schedule='0 8 * * *').put()
      Project(name='test2', schedule='0 9 * * *').put()

      # test2 is absent from the project list, so sync should remove it.
      projects = {'test1': ProjectMetadata('0 8 * * *')}
      sync_projects(cloud_scheduler_client, projects)

      projects_query = Project.query()
      self.assertEqual(
          {'test1': '0 8 * * *'},
          {project.name: project.schedule for project in projects_query})

  def test_get_projects_yaml(self):
    """Testing get_projects() yaml get_schedule()."""

    repo = Repository('oss-fuzz', 'dir', 'projects', [
        Repository('test0', 'dir', 'projects/test0', [
            Repository('Dockerfile', 'file', 'projects/test0/Dockerfile'),
            Repository('project.yaml', 'file', 'projects/test0/project.yaml')
        ]),
        Repository('test1', 'dir', 'projects/test1', [
            Repository('Dockerfile', 'file', 'projects/test1/Dockerfile'),
            Repository('project.yaml', 'file', 'projects/test1/project.yaml')
        ])
    ])
    # schedule: N yields N evenly spaced daily runs (see expected crons).
    repo.contents[0].contents[1].set_yaml_contents(b'schedule: 2')
    repo.contents[1].contents[1].set_yaml_contents(b'schedule: 3')

    self.assertEqual(
        get_projects(repo), {
            'test0': ProjectMetadata('0 6,18 * * *'),
            'test1': ProjectMetadata('0 6,14,22 * * *')
        })

  def test_get_projects_no_docker_file(self):
    """Testing get_projects() with missing dockerfile"""

    repo = Repository('oss-fuzz', 'dir', 'projects', [
        Repository('test0', 'dir', 'projects/test0', [
            Repository('Dockerfile', 'file', 'projects/test0/Dockerfile'),
            Repository('project.yaml', 'file', 'projects/test0/project.yaml')
        ]),
        # test1 has no Dockerfile, so it should be skipped.
        Repository('test1', 'dir', 'projects/test1')
    ])

    self.assertEqual(get_projects(repo),
                     {'test0': ProjectMetadata('0 6 * * *')})

  def test_get_projects_invalid_project_name(self):
    """Testing get_projects() with invalid project name"""

    repo = Repository('oss-fuzz', 'dir', 'projects', [
        Repository('test0', 'dir', 'projects/test0', [
            Repository('Dockerfile', 'file', 'projects/test0/Dockerfile'),
            Repository('project.yaml', 'file', 'projects/test0/project.yaml')
        ]),
        # 'test1@' contains a character that makes the name invalid.
        Repository('test1@', 'dir', 'projects/test1', [
            Repository('Dockerfile', 'file', 'projects/test1/Dockerfile'),
            # NOTE(review): path reuses projects/test0 — looks like a
            # copy-paste slip; harmless here since 'test1@' is rejected.
            Repository('project.yaml', 'file', 'projects/test0/project.yaml')
        ])
    ])

    self.assertEqual(get_projects(repo),
                     {'test0': ProjectMetadata('0 6 * * *')})

  def test_get_projects_non_directory_type_project(self):
    """Testing get_projects() when a file in projects/ is not of type 'dir'."""

    repo = Repository('oss-fuzz', 'dir', 'projects', [
        Repository('test0', 'dir', 'projects/test0', [
            Repository('Dockerfile', 'file', 'projects/test0/Dockerfile'),
            Repository('project.yaml', 'file', 'projects/test0/project.yaml')
        ]),
        # test1 is a plain file, not a project directory.
        Repository('test1', 'file', 'projects/test1')
    ])

    self.assertEqual(get_projects(repo),
                     {'test0': ProjectMetadata('0 6 * * *')})

  def test_invalid_yaml_format(self):
    """Testing invalid yaml schedule parameter argument."""

    repo = Repository('oss-fuzz', 'dir', 'projects', [
        Repository('test0', 'dir', 'projects/test0', [
            Repository('Dockerfile', 'file', 'projects/test0/Dockerfile'),
            Repository('project.yaml', 'file', 'projects/test0/project.yaml')
        ])
    ])
    # A non-numeric schedule value should cause the project to be dropped.
    repo.contents[0].contents[1].set_yaml_contents(b'schedule: some-string')

    self.assertEqual(get_projects(repo), {})

  def test_yaml_out_of_range(self):
    """Testing invalid yaml schedule parameter argument."""

    repo = Repository('oss-fuzz', 'dir', 'projects', [
        Repository('test0', 'dir', 'projects/test0', [
            Repository('Dockerfile', 'file', 'projects/test0/Dockerfile'),
            Repository('project.yaml', 'file', 'projects/test0/project.yaml')
        ])
    ])
    # schedule: 5 is outside the accepted range, so the project is dropped.
    repo.contents[0].contents[1].set_yaml_contents(b'schedule: 5')

    self.assertEqual(get_projects(repo), {})

  def test_get_access_token(self):
    """Testing get_access_token()."""
    client = ndb.Client()

    with client.context():
      # The emulator's datastore is empty, so no token is available —
      # presumably why get_access_token raises here.
      self.assertRaises(RuntimeError, get_access_token)

  @classmethod
  def tearDownClass(cls):
    # TODO: replace this with a cleaner way of killing the process
    os.system('pkill -f datastore')
|
|
|
|
|
2020-06-25 03:41:04 +00:00
|
|
|
|
|
|
|
if __name__ == '__main__':
  # exit=False keeps unittest from calling sys.exit() once the run finishes.
  unittest.main(exit=False)
|