Dataset columns:

project: string (length 1–98)
commit_sha: string (length 40)
parent_sha: string (length 40)
file_path: string (length 4–209)
project_url: string (length 23–132)
likely_bug: bool (1 class)
comodified: bool (1 class)
in_function: bool (2 classes)
diff: string (length 27–9.71k)
before: string (length 1–8.91k)
after: string (length 1–6k)
sstub_pattern: string (23 classes)
edit_script: string (length 33–158k)
key: string (length 45–154)
commit_message: string (length 3–65.5k)
files: list
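
Each row of the preview below is one single-statement bug-fix record with these columns. A minimal loading sketch, assuming the dump is available as newline-delimited JSON with exactly these field names (the file name bug_fixes.jsonl is hypothetical):

```python
import json

def load_records(path="bug_fixes.jsonl"):
    """Yield one bug-fix record (a dict with the columns listed above) per line."""
    with open(path, encoding="utf-8") as fh:
        for line in fh:
            if line.strip():
                yield json.loads(line)

# Keep only rows flagged as likely bugs whose change happened inside a function.
in_function_fixes = [r for r in load_records()
                     if r["likely_bug"] and r["in_function"]]
print(len(in_function_fixes), "likely in-function single-statement fixes")
```
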
awx
fe9f502e919bfe35b7ed89b523803c0b6cd65250
7bff586c4d29041b0a6085c110ba3a5f83170d3a
awx/main/middleware.py
https://github.com/gconsidine/awx
true
false
false
@@ -5,7 +5,7 @@ import logging import threading import uuid -from django.contrib.auth.models import User, AnonymousUser +from django.contrib.auth.models import User from django.db.models.signals import post_save from django.db import IntegrityError from django.http import HttpResponseRedirect
from django . contrib . auth . models import User , AnonymousUser
from django . contrib . auth . models import User
SINGLE_STMT
[["Delete", [",:,", 3, 44, 3, 45]], ["Delete", ["identifier:AnonymousUser", 3, 46, 3, 59]], ["Delete", ["dotted_name", 3, 46, 3, 59]]]
gconsidine/awx@fe9f502e919bfe35b7ed89b523803c0b6cd65250
flake8 fix
[ { "sha": "f73758ad7dd5e56d565e64d8c0f741fef8e41f57", "filename": "awx/main/middleware.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/fe9f502e919bfe35b7ed89b523803c0b6cd65250/awx%2Fmain%2Fmiddleware.py", "raw_url": "https://github.com/gconsidine/awx/raw/fe9f502e919bfe35b7ed89b523803c0b6cd65250/awx%2Fmain%2Fmiddleware.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Fmiddleware.py?ref=fe9f502e919bfe35b7ed89b523803c0b6cd65250", "patch": "@@ -5,7 +5,7 @@\n import threading\n import uuid\n \n-from django.contrib.auth.models import User, AnonymousUser\n+from django.contrib.auth.models import User\n from django.db.models.signals import post_save\n from django.db import IntegrityError\n from django.http import HttpResponseRedirect" } ]
awx
3132d4efeefa95098b18519c5a693a1c6b536a72
01eca729b1b7fde289fd5f8d0b40cebfbbf38b9f
awx/api/license.py
https://github.com/gconsidine/awx
true
false
true
@@ -16,7 +16,7 @@ def get_license(show_key=False): license_reader = TaskSerializer() - return license_reader.from_file(show_key=show_key) + return license_reader.from_database(show_key=show_key) def feature_enabled(name):
return license_reader . from_file ( show_key = show_key )
return license_reader . from_database ( show_key = show_key )
WRONG_FUNCTION_NAME
[["Update", ["identifier:from_file", 1, 27, 1, 36], "from_database"]]
gconsidine/awx@3132d4efeefa95098b18519c5a693a1c6b536a72
Fix license enablement utility helpers We know call "from_database" which can implicitly call "from_file" if the database config isn't available.
[ { "sha": "c380df5a5643f271ac3dd2d232c6cbd7b58328a4", "filename": "awx/api/license.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/3132d4efeefa95098b18519c5a693a1c6b536a72/awx%2Fapi%2Flicense.py", "raw_url": "https://github.com/gconsidine/awx/raw/3132d4efeefa95098b18519c5a693a1c6b536a72/awx%2Fapi%2Flicense.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fapi%2Flicense.py?ref=3132d4efeefa95098b18519c5a693a1c6b536a72", "patch": "@@ -16,7 +16,7 @@ def get_license(show_key=False):\n place on this Tower instance.\n \"\"\"\n license_reader = TaskSerializer()\n- return license_reader.from_file(show_key=show_key)\n+ return license_reader.from_database(show_key=show_key)\n \n \n def feature_enabled(name):" } ]
awx
c2875c65ca741cf587378be8c5b57fc6c68f84f5
fd2339dbb8913ead02d802f845c4ed2f8d2bd6ae
awx/plugins/callback/job_event_callback.py
https://github.com/gconsidine/awx
true
false
false
@@ -56,7 +56,7 @@ if os.environ.get('GRAPHITE_PORT_8125_UDP_ADDR'): prefix='tower.job.event_callback', maxudpsize=512) else: - from statsd import StatsClientBase + from statsd.client import StatsClientBase class NoStatsClient(StatsClientBase): def __init__(self, *args, **kwargs):
from statsd import StatsClientBase
from statsd . client import StatsClientBase
SINGLE_STMT
[["Insert", ["dotted_name", 3, 10, 3, 16], [".:.", "T"], 1], ["Insert", ["dotted_name", 3, 10, 3, 16], ["identifier:client", "T"], 2]]
gconsidine/awx@c2875c65ca741cf587378be8c5b57fc6c68f84f5
Fix stats client import
[ { "sha": "064ca78c22523f775159fd6bf400d183d02d91f2", "filename": "awx/plugins/callback/job_event_callback.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/c2875c65ca741cf587378be8c5b57fc6c68f84f5/awx%2Fplugins%2Fcallback%2Fjob_event_callback.py", "raw_url": "https://github.com/gconsidine/awx/raw/c2875c65ca741cf587378be8c5b57fc6c68f84f5/awx%2Fplugins%2Fcallback%2Fjob_event_callback.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fplugins%2Fcallback%2Fjob_event_callback.py?ref=c2875c65ca741cf587378be8c5b57fc6c68f84f5", "patch": "@@ -56,7 +56,7 @@\n prefix='tower.job.event_callback',\n maxudpsize=512)\n else:\n- from statsd import StatsClientBase\n+ from statsd.client import StatsClientBase\n \n class NoStatsClient(StatsClientBase):\n def __init__(self, *args, **kwargs):" } ]
awx
a44318f0cdfa706a04b3372db52f2cfb4106d0bf
320a5b77762c8b527e7ff98b7670c07c0ecee434
awx/main/access.py
https://github.com/gconsidine/awx
true
false
true
@@ -149,7 +149,7 @@ class BaseAccess(object): def check_license(self, add_host=False, feature=None, check_expiration=True): reader = TaskSerializer() validation_info = reader.from_database() - if ('test' in sys.argv or 'jenkins' in sys.argv) and not os.environ.get('SKIP_LICENSE_FIXUP_FOR_TEST', ''): + if ('test' in sys.argv or 'py.test' in sys.argv[0] or 'jenkins' in sys.argv) and not os.environ.get('SKIP_LICENSE_FIXUP_FOR_TEST', ''): validation_info['free_instances'] = 99999999 validation_info['time_remaining'] = 99999999
if ( 'test' in sys . argv or 'jenkins' in sys . argv ) and not os . environ . get ( 'SKIP_LICENSE_FIXUP_FOR_TEST' , '' ) : validation_info [ 'free_instances' ] = 99999999 validation_info [ 'time_remaining' ] = 99999999
if ( 'test' in sys . argv or 'py.test' in sys . argv [ 0 ] or 'jenkins' in sys . argv ) and not os . environ . get ( 'SKIP_LICENSE_FIXUP_FOR_TEST' , '' ) : validation_info [ 'free_instances' ] = 99999999 validation_info [ 'time_remaining' ] = 99999999
LESS_SPECIFIC_IF
[["Insert", ["boolean_operator", 3, 13, 3, 56], ["boolean_operator", "N0"], 0], ["Insert", ["boolean_operator", 3, 13, 3, 56], ["or:or", "T"], 1], ["Move", "N0", ["comparison_operator", 3, 13, 3, 31], 0], ["Move", "N0", ["or:or", 3, 32, 3, 34], 1], ["Insert", "N0", ["comparison_operator", "N1"], 2], ["Insert", "N1", ["string:'py.test'", "T"], 0], ["Insert", "N1", ["in:in", "T"], 1], ["Insert", "N1", ["subscript", "N2"], 2], ["Insert", "N2", ["attribute", "N3"], 0], ["Insert", "N2", ["[:[", "T"], 1], ["Insert", "N2", ["integer:0", "T"], 2], ["Insert", "N2", ["]:]", "T"], 3], ["Insert", "N3", ["identifier:sys", "T"], 0], ["Insert", "N3", [".:.", "T"], 1], ["Insert", "N3", ["identifier:argv", "T"], 2]]
gconsidine/awx@a44318f0cdfa706a04b3372db52f2cfb4106d0bf
fixed check_license to work with py.test
[ { "sha": "ad87506ab2fd6b1de11deb38348de3c725ad88a7", "filename": "awx/main/access.py", "status": "modified", "additions": 8, "deletions": 8, "changes": 16, "blob_url": "https://github.com/gconsidine/awx/blob/a44318f0cdfa706a04b3372db52f2cfb4106d0bf/awx%2Fmain%2Faccess.py", "raw_url": "https://github.com/gconsidine/awx/raw/a44318f0cdfa706a04b3372db52f2cfb4106d0bf/awx%2Fmain%2Faccess.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Faccess.py?ref=a44318f0cdfa706a04b3372db52f2cfb4106d0bf", "patch": "@@ -149,7 +149,7 @@ def can_unattach(self, obj, sub_obj, relationship):\n def check_license(self, add_host=False, feature=None, check_expiration=True):\n reader = TaskSerializer()\n validation_info = reader.from_database()\n- if ('test' in sys.argv or 'jenkins' in sys.argv) and not os.environ.get('SKIP_LICENSE_FIXUP_FOR_TEST', ''):\n+ if ('test' in sys.argv or 'py.test' in sys.argv[0] or 'jenkins' in sys.argv) and not os.environ.get('SKIP_LICENSE_FIXUP_FOR_TEST', ''):\n validation_info['free_instances'] = 99999999\n validation_info['time_remaining'] = 99999999\n validation_info['grace_period_remaining'] = 99999999\n@@ -912,8 +912,8 @@ def get_queryset(self):\n )\n \n return base_qs.filter(\n- Q(id__in=org_admin_ids) | \n- Q(id__in=perm_deploy_ids) | \n+ Q(id__in=org_admin_ids) |\n+ Q(id__in=perm_deploy_ids) |\n Q(id__in=perm_check_ids)\n )\n \n@@ -926,7 +926,7 @@ def can_add(self, data):\n a user can create a job template if they are a superuser, an org admin\n of any org that the project is a member, or if they have user or team\n based permissions tying the project to the inventory source for the\n- given action as well as the 'create' deploy permission. \n+ given action as well as the 'create' deploy permission.\n Users who are able to create deploy jobs can also run normal and check (dry run) jobs.\n '''\n if not data or '_method' in data: # So the browseable API will work?\n@@ -1126,8 +1126,8 @@ def get_queryset(self):\n )\n \n return base_qs.filter(\n- Q(id__in=org_admin_ids) | \n- Q(id__in=perm_deploy_ids) | \n+ Q(id__in=org_admin_ids) |\n+ Q(id__in=perm_deploy_ids) |\n Q(id__in=perm_check_ids)\n )\n \n@@ -1367,9 +1367,9 @@ class UnifiedJobTemplateAccess(BaseAccess):\n projects without SCM configured or inventory sources without a cloud\n source.\n '''\n- \n+\n model = UnifiedJobTemplate\n- \n+\n def get_queryset(self):\n qs = self.model.objects.filter(active=True).distinct()\n project_qs = self.user.get_queryset(Project).filter(scm_type__in=[s[0] for s in Project.SCM_TYPE_CHOICES])" } ]
awx
7ab839c17f9327fe335d53f741c1d48932ce38f3
fb4cad9838d978a08d9e7800fdfb0c23baf1c9df
awx/plugins/callback/job_event_callback.py
https://github.com/gconsidine/awx
true
false
true
@@ -212,7 +212,7 @@ class BaseCallbackModule(object): def _log_event(self, event, **event_data): if self.callback_consumer_port: - with statsd.timer('zmq_post_event_msg.{}'.format(event)): + with statsd.timer('zmq_post_event_msg.{0}'.format(event)): self._post_job_event_queue_msg(event, event_data) else: self._post_rest_api_event(event, event_data)
with statsd . timer ( 'zmq_post_event_msg.{}' . format ( event ) ) : self . _post_job_event_queue_msg ( event , event_data )
with statsd . timer ( 'zmq_post_event_msg.{0}' . format ( event ) ) : self . _post_job_event_queue_msg ( event , event_data )
CHANGE_STRING_LITERAL
[["Update", ["string:'zmq_post_event_msg.{}'", 3, 31, 3, 54], "'zmq_post_event_msg.{0}'"]]
gconsidine/awx@7ab839c17f9327fe335d53f741c1d48932ce38f3
Fix up another statsd formatting error
[ { "sha": "2026f0c410e269766e2d5c371e9cbd07eae2acf7", "filename": "awx/plugins/callback/job_event_callback.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/7ab839c17f9327fe335d53f741c1d48932ce38f3/awx%2Fplugins%2Fcallback%2Fjob_event_callback.py", "raw_url": "https://github.com/gconsidine/awx/raw/7ab839c17f9327fe335d53f741c1d48932ce38f3/awx%2Fplugins%2Fcallback%2Fjob_event_callback.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fplugins%2Fcallback%2Fjob_event_callback.py?ref=7ab839c17f9327fe335d53f741c1d48932ce38f3", "patch": "@@ -212,7 +212,7 @@ def _post_rest_api_event(self, event, event_data):\n \n def _log_event(self, event, **event_data):\n if self.callback_consumer_port:\n- with statsd.timer('zmq_post_event_msg.{}'.format(event)):\n+ with statsd.timer('zmq_post_event_msg.{0}'.format(event)):\n self._post_job_event_queue_msg(event, event_data)\n else:\n self._post_rest_api_event(event, event_data)" } ]
awx
b82bd9404f32c2acbf4c8cbe0ddafd7920b22f68
58499175d6c8a4d9a53d211d320978dabb07db69
awx/main/tasks.py
https://github.com/gconsidine/awx
true
false
true
@@ -1219,7 +1219,7 @@ class RunInventoryUpdate(BaseTask): env['GCE_EMAIL'] = passwords.get('source_username', '') env['GCE_PROJECT'] = passwords.get('source_project', '') env['GCE_PEM_FILE_PATH'] = cloud_credential - env['GCE_ZONE'] = inventory_source.source_regions + env['GCE_ZONE'] = inventory_update.source_regions elif inventory_update.source == 'openstack': env['OS_CLIENT_CONFIG_FILE'] = cloud_credential
env [ 'GCE_ZONE' ] = inventory_source . source_regions
env [ 'GCE_ZONE' ] = inventory_update . source_regions
CHANGE_IDENTIFIER_USED
[["Update", ["identifier:inventory_source", 3, 31, 3, 47], "inventory_update"]]
gconsidine/awx@b82bd9404f32c2acbf4c8cbe0ddafd7920b22f68
fixing inventory bug for GCE introduced fixing #616
[ { "sha": "bbb63bd359e26f670ad72452f3ca696b2a21b027", "filename": "awx/main/tasks.py", "status": "modified", "additions": 3, "deletions": 3, "changes": 6, "blob_url": "https://github.com/gconsidine/awx/blob/b82bd9404f32c2acbf4c8cbe0ddafd7920b22f68/awx%2Fmain%2Ftasks.py", "raw_url": "https://github.com/gconsidine/awx/raw/b82bd9404f32c2acbf4c8cbe0ddafd7920b22f68/awx%2Fmain%2Ftasks.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Ftasks.py?ref=b82bd9404f32c2acbf4c8cbe0ddafd7920b22f68", "patch": "@@ -843,7 +843,7 @@ def get_password_prompts(self):\n d[re.compile(r'^PFEXEC password.*:\\s*?$', re.M)] = 'become_password'\n d[re.compile(r'^pfexec password.*:\\s*?$', re.M)] = 'become_password'\n d[re.compile(r'^RUNAS password.*:\\s*?$', re.M)] = 'become_password'\n- d[re.compile(r'^runas password.*:\\s*?$', re.M)] = 'become_password' \n+ d[re.compile(r'^runas password.*:\\s*?$', re.M)] = 'become_password'\n d[re.compile(r'^SSH password:\\s*?$', re.M)] = 'ssh_password'\n d[re.compile(r'^Password:\\s*?$', re.M)] = 'ssh_password'\n d[re.compile(r'^Vault password:\\s*?$', re.M)] = 'vault_password'\n@@ -1219,7 +1219,7 @@ def build_env(self, inventory_update, **kwargs):\n env['GCE_EMAIL'] = passwords.get('source_username', '')\n env['GCE_PROJECT'] = passwords.get('source_project', '')\n env['GCE_PEM_FILE_PATH'] = cloud_credential\n- env['GCE_ZONE'] = inventory_source.source_regions\n+ env['GCE_ZONE'] = inventory_update.source_regions\n elif inventory_update.source == 'openstack':\n env['OS_CLIENT_CONFIG_FILE'] = cloud_credential\n elif inventory_update.source == 'file':\n@@ -1473,7 +1473,7 @@ def get_password_prompts(self):\n d[re.compile(r'^PFEXEC password.*:\\s*?$', re.M)] = 'become_password'\n d[re.compile(r'^pfexec password.*:\\s*?$', re.M)] = 'become_password'\n d[re.compile(r'^RUNAS password.*:\\s*?$', re.M)] = 'become_password'\n- d[re.compile(r'^runas password.*:\\s*?$', re.M)] = 'become_password' \n+ d[re.compile(r'^runas password.*:\\s*?$', re.M)] = 'become_password'\n d[re.compile(r'^SSH password:\\s*?$', re.M)] = 'ssh_password'\n d[re.compile(r'^Password:\\s*?$', re.M)] = 'ssh_password'\n return d" } ]
awx
7d4b54a651f73bb86de009842052564fadc8efe6
0ff94e424dba127054cf949a3ddd914ae0452c03
awx/main/models/mixins.py
https://github.com/gconsidine/awx
true
false
false
@@ -8,7 +8,7 @@ from awx.main.models.rbac import Resource from awx.main.fields import ImplicitResourceField -__all__ = 'ResourceMixin' +__all__ = ['ResourceMixin'] class ResourceMixin(models.Model):
__all__ = 'ResourceMixin'
__all__ = [ 'ResourceMixin' ]
SINGLE_STMT
[["Insert", ["assignment", 3, 1, 3, 26], ["list", "N0"], 2], ["Insert", "N0", ["[:[", "T"], 0], ["Move", "N0", ["string:'ResourceMixin'", 3, 11, 3, 26], 1], ["Insert", "N0", ["]:]", "T"], 2]]
gconsidine/awx@7d4b54a651f73bb86de009842052564fadc8efe6
Fixed __all__ def
[ { "sha": "63ecf3a0dd0af4e550f45cabc6452971c4eca748", "filename": "awx/main/models/mixins.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/7d4b54a651f73bb86de009842052564fadc8efe6/awx%2Fmain%2Fmodels%2Fmixins.py", "raw_url": "https://github.com/gconsidine/awx/raw/7d4b54a651f73bb86de009842052564fadc8efe6/awx%2Fmain%2Fmodels%2Fmixins.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Fmodels%2Fmixins.py?ref=7d4b54a651f73bb86de009842052564fadc8efe6", "patch": "@@ -8,7 +8,7 @@\n from awx.main.fields import ImplicitResourceField\n \n \n-__all__ = 'ResourceMixin'\n+__all__ = ['ResourceMixin']\n \n class ResourceMixin(models.Model):\n " } ]
awx
a9bf29d2a7cbf827ed77d52e9a14792e280d95c1
3228b41eea527195c51e093821c1da14ff5af5e9
awx/api/serializers.py
https://github.com/gconsidine/awx
true
false
false
@@ -50,7 +50,7 @@ from awx.fact.models import * # noqa logger = logging.getLogger('awx.api.serializers') # Fields that should be summarized regardless of object type. -DEFAULT_SUMMARY_FIELDS = ('name', 'description')# , 'created_by', 'modified_by')#, 'type') +DEFAULT_SUMMARY_FIELDS = ('id', 'name', 'description')# , 'created_by', 'modified_by')#, 'type') # Keys are fields (foreign keys) where, if found on an instance, summary info # should be added to the serialized data. Values are a tuple of field names on
DEFAULT_SUMMARY_FIELDS = ( 'name' , 'description' )
DEFAULT_SUMMARY_FIELDS = ( 'id' , 'name' , 'description' )
ADD_ELEMENTS_TO_ITERABLE
[["Move", ["string:'name'", 3, 27, 3, 33], ["tuple", 3, 26, 3, 49], 2], ["Insert", ["tuple", 3, 26, 3, 49], ["string:'id'", "T"], 1], ["Insert", ["tuple", 3, 26, 3, 49], [",:,", "T"], 3]]
gconsidine/awx@a9bf29d2a7cbf827ed77d52e9a14792e280d95c1
Add the id field to the default summary fields This is to support the UI. ref: #1063
[ { "sha": "948e61c00d0c685c2378c966846f85fe2048cb85", "filename": "awx/api/serializers.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/a9bf29d2a7cbf827ed77d52e9a14792e280d95c1/awx%2Fapi%2Fserializers.py", "raw_url": "https://github.com/gconsidine/awx/raw/a9bf29d2a7cbf827ed77d52e9a14792e280d95c1/awx%2Fapi%2Fserializers.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fapi%2Fserializers.py?ref=a9bf29d2a7cbf827ed77d52e9a14792e280d95c1", "patch": "@@ -50,7 +50,7 @@\n logger = logging.getLogger('awx.api.serializers')\n \n # Fields that should be summarized regardless of object type.\n-DEFAULT_SUMMARY_FIELDS = ('name', 'description')# , 'created_by', 'modified_by')#, 'type')\n+DEFAULT_SUMMARY_FIELDS = ('id', 'name', 'description')# , 'created_by', 'modified_by')#, 'type')\n \n # Keys are fields (foreign keys) where, if found on an instance, summary info\n # should be added to the serialized data. Values are a tuple of field names on" } ]
awx
44618d49561c314477a3c54a9665ae92fdf72ea8
88bf44bfcbf75c938587faa2fb868ff082d85f80
awx/main/tests/functional/commands/test_cleanup_facts.py
https://github.com/gconsidine/awx
true
false
true
@@ -71,7 +71,7 @@ def test_cleanup_logic(fact_scans, hosts): module = 'ansible' cleanup_facts = CleanupFacts() - deleted_count = cleanup_facts.cleanup(timestamp_middle, granularity, module=module) + cleanup_facts.cleanup(timestamp_middle, granularity, module=module) host_ids = Host.objects.all().values_list('id', flat=True)
deleted_count = cleanup_facts . cleanup ( timestamp_middle , granularity , module = module )
cleanup_facts . cleanup ( timestamp_middle , granularity , module = module )
SINGLE_STMT
[["Move", ["expression_statement", 3, 5, 3, 88], ["call", 3, 21, 3, 88], 0], ["Delete", ["identifier:deleted_count", 3, 5, 3, 18]], ["Delete", ["=:=", 3, 19, 3, 20]], ["Delete", ["assignment", 3, 5, 3, 88]]]
gconsidine/awx@44618d49561c314477a3c54a9665ae92fdf72ea8
flake8 fix
[ { "sha": "d741b583023cf25a209b70dd371623e6daf626da", "filename": "awx/main/tests/functional/commands/test_cleanup_facts.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/44618d49561c314477a3c54a9665ae92fdf72ea8/awx%2Fmain%2Ftests%2Ffunctional%2Fcommands%2Ftest_cleanup_facts.py", "raw_url": "https://github.com/gconsidine/awx/raw/44618d49561c314477a3c54a9665ae92fdf72ea8/awx%2Fmain%2Ftests%2Ffunctional%2Fcommands%2Ftest_cleanup_facts.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Ftests%2Ffunctional%2Fcommands%2Ftest_cleanup_facts.py?ref=44618d49561c314477a3c54a9665ae92fdf72ea8", "patch": "@@ -71,7 +71,7 @@ def test_cleanup_logic(fact_scans, hosts):\n module = 'ansible'\n \n cleanup_facts = CleanupFacts()\n- deleted_count = cleanup_facts.cleanup(timestamp_middle, granularity, module=module)\n+ cleanup_facts.cleanup(timestamp_middle, granularity, module=module)\n \n \n host_ids = Host.objects.all().values_list('id', flat=True)" } ]
awx
32fe6ea4fa2aa710d2627b6c19c0e25013b72a1d
3801b4a4e9319a5d2e8606d7dc5ab05b8ee130e3
awx/main/tests/functional/test_rbac_workflow.py
https://github.com/gconsidine/awx
true
false
true
@@ -24,7 +24,7 @@ def wfjt_node(wfjt_with_nodes): @pytest.fixture def workflow_job(wfjt): - return wfjt.jobs.create(name='test_workflow') + return wfjt.workflow_jobs.create(name='test_workflow') @pytest.mark.django_db
return wfjt . jobs . create ( name = 'test_workflow' )
return wfjt . workflow_jobs . create ( name = 'test_workflow' )
CHANGE_ATTRIBUTE_USED
[["Update", ["identifier:jobs", 3, 17, 3, 21], "workflow_jobs"]]
gconsidine/awx@32fe6ea4fa2aa710d2627b6c19c0e25013b72a1d
Test fixture update.
[ { "sha": "822fdc0bcc503c41029d7810ae6c0ef218e86789", "filename": "awx/main/tests/functional/test_rbac_workflow.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/32fe6ea4fa2aa710d2627b6c19c0e25013b72a1d/awx%2Fmain%2Ftests%2Ffunctional%2Ftest_rbac_workflow.py", "raw_url": "https://github.com/gconsidine/awx/raw/32fe6ea4fa2aa710d2627b6c19c0e25013b72a1d/awx%2Fmain%2Ftests%2Ffunctional%2Ftest_rbac_workflow.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Ftests%2Ffunctional%2Ftest_rbac_workflow.py?ref=32fe6ea4fa2aa710d2627b6c19c0e25013b72a1d", "patch": "@@ -24,7 +24,7 @@ def wfjt_node(wfjt_with_nodes):\n \n @pytest.fixture\n def workflow_job(wfjt):\n- return wfjt.jobs.create(name='test_workflow')\n+ return wfjt.workflow_jobs.create(name='test_workflow')\n \n \n @pytest.mark.django_db" } ]
awx
d882a14d8761f614509443cf8867d5945203eab6
cd084f5c60897cb77ca7792095c894d47efc9eab
awx/main/models/unified_jobs.py
https://github.com/gconsidine/awx
true
false
true
@@ -801,7 +801,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique emit_channel_notification('jobs-status_changed', status_data) if self.spawned_by_workflow: - event_serialized['group_name'] = "workflow_events" + status_data['group_name'] = "workflow_events" emit_channel_notification('workflow_events-' + str(self.workflow_job_id), status_data)
event_serialized [ 'group_name' ] = "workflow_events"
status_data [ 'group_name' ] = "workflow_events"
CHANGE_IDENTIFIER_USED
[["Update", ["identifier:event_serialized", 3, 13, 3, 29], "status_data"]]
gconsidine/awx@d882a14d8761f614509443cf8867d5945203eab6
fix group_name for workflow_events
[ { "sha": "7e95e5abd74822bc13aa0a86aa2d4f434772522c", "filename": "awx/main/models/unified_jobs.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/d882a14d8761f614509443cf8867d5945203eab6/awx%2Fmain%2Fmodels%2Funified_jobs.py", "raw_url": "https://github.com/gconsidine/awx/raw/d882a14d8761f614509443cf8867d5945203eab6/awx%2Fmain%2Fmodels%2Funified_jobs.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Fmodels%2Funified_jobs.py?ref=d882a14d8761f614509443cf8867d5945203eab6", "patch": "@@ -801,7 +801,7 @@ def websocket_emit_status(self, status):\n emit_channel_notification('jobs-status_changed', status_data)\n \n if self.spawned_by_workflow:\n- event_serialized['group_name'] = \"workflow_events\"\n+ status_data['group_name'] = \"workflow_events\"\n emit_channel_notification('workflow_events-' + str(self.workflow_job_id), status_data)\n \n " } ]
awx
e725b16cd919321d350d2a92532ffcdbf500dfca
5df4096660deb6ce385fc1b8b5999b2e3b1ce88a
awx/main/tasks.py
https://github.com/gconsidine/awx
true
false
true
@@ -1019,7 +1019,7 @@ class RunJob(BaseTask): def pre_run_hook(self, job, **kwargs): if job.project and job.project.scm_type: - local_project_sync = job.project.create_project_update() + local_project_sync = job.project.create_project_update(launch_type="sync") local_project_sync.job_type = 'run' local_project_sync.save() project_update_task = local_project_sync._get_task_class()
local_project_sync = job . project . create_project_update ( )
local_project_sync = job . project . create_project_update ( launch_type = "sync" )
SAME_FUNCTION_MORE_ARGS
[["Insert", ["argument_list", 3, 67, 3, 69], ["keyword_argument", "N0"], 1], ["Insert", "N0", ["identifier:launch_type", "T"], 0], ["Insert", "N0", ["=:=", "T"], 1], ["Insert", "N0", ["string:\"sync\"", "T"], 2]]
gconsidine/awx@e725b16cd919321d350d2a92532ffcdbf500dfca
Change job-run project refresh launch type to "sync"
[ { "sha": "0c8aa8acce4f39687828de9e786985d1562abd0c", "filename": "awx/main/tasks.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/e725b16cd919321d350d2a92532ffcdbf500dfca/awx%2Fmain%2Ftasks.py", "raw_url": "https://github.com/gconsidine/awx/raw/e725b16cd919321d350d2a92532ffcdbf500dfca/awx%2Fmain%2Ftasks.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Ftasks.py?ref=e725b16cd919321d350d2a92532ffcdbf500dfca", "patch": "@@ -1019,7 +1019,7 @@ def should_use_proot(self, instance, **kwargs):\n \n def pre_run_hook(self, job, **kwargs):\n if job.project and job.project.scm_type:\n- local_project_sync = job.project.create_project_update()\n+ local_project_sync = job.project.create_project_update(launch_type=\"sync\")\n local_project_sync.job_type = 'run'\n local_project_sync.save()\n project_update_task = local_project_sync._get_task_class()" } ]
awx
fbd27e25a6ba9086bee6fa3bb5eefa9a6b778480
29cc5d0f2ecaceaa5348d51235feea385a6d375d
awx/main/models/unified_jobs.py
https://github.com/gconsidine/awx
true
false
true
@@ -675,7 +675,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique fields = unified_jt_class._get_unified_job_field_names() + [parent_field_name] unified_job = copy_model_by_class(self, unified_job_class, fields, {}) - unified_job.job_type = 'relaunch' + unified_job.launch_type = 'relaunch' unified_job.save() # Labels coppied here
unified_job . job_type = 'relaunch'
unified_job . launch_type = 'relaunch'
CHANGE_ATTRIBUTE_USED
[["Update", ["identifier:job_type", 3, 21, 3, 29], "launch_type"]]
gconsidine/awx@fbd27e25a6ba9086bee6fa3bb5eefa9a6b778480
Fix scan job relaunch bug
[ { "sha": "7bc89d657c338dca436dfb7734672f4ef3cdca8d", "filename": "awx/main/models/unified_jobs.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/fbd27e25a6ba9086bee6fa3bb5eefa9a6b778480/awx%2Fmain%2Fmodels%2Funified_jobs.py", "raw_url": "https://github.com/gconsidine/awx/raw/fbd27e25a6ba9086bee6fa3bb5eefa9a6b778480/awx%2Fmain%2Fmodels%2Funified_jobs.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Fmodels%2Funified_jobs.py?ref=fbd27e25a6ba9086bee6fa3bb5eefa9a6b778480", "patch": "@@ -675,7 +675,7 @@ def copy_unified_job(self):\n \n fields = unified_jt_class._get_unified_job_field_names() + [parent_field_name]\n unified_job = copy_model_by_class(self, unified_job_class, fields, {})\n- unified_job.job_type = 'relaunch'\n+ unified_job.launch_type = 'relaunch'\n unified_job.save()\n \n # Labels coppied here" } ]
awx
4295ab3e4a78357b7c11e9c1a194bff79425aec6
d8ee92aefd8d09f25a8d40c4d116b3818fd29eec
awx/api/views.py
https://github.com/gconsidine/awx
true
false
true
@@ -534,7 +534,7 @@ class AuthView(APIView): saml_backend_data = dict(backend_data.items()) saml_backend_data['login_url'] = '%s?idp=%s' % (login_url, idp) full_backend_name = '%s:%s' % (name, idp) - if err_backend == full_backend_name and err_message: + if (err_backend == full_backend_name or err_backend == name) and err_message: saml_backend_data['error'] = err_message data[full_backend_name] = saml_backend_data else:
if err_backend == full_backend_name and err_message : saml_backend_data [ 'error' ] = err_message
if ( err_backend == full_backend_name or err_backend == name ) and err_message : saml_backend_data [ 'error' ] = err_message
CHANGE_BINARY_OPERAND
[["Insert", ["boolean_operator", 3, 24, 3, 72], ["parenthesized_expression", "N0"], 0], ["Insert", "N0", ["(:(", "T"], 0], ["Insert", "N0", ["boolean_operator", "N1"], 1], ["Insert", "N0", ["):)", "T"], 2], ["Move", "N1", ["comparison_operator", 3, 24, 3, 56], 0], ["Insert", "N1", ["or:or", "T"], 1], ["Insert", "N1", ["comparison_operator", "N2"], 2], ["Insert", "N2", ["identifier:err_backend", "T"], 0], ["Insert", "N2", ["==:==", "T"], 1], ["Insert", "N2", ["identifier:name", "T"], 2]]
gconsidine/awx@4295ab3e4a78357b7c11e9c1a194bff79425aec6
Show SAML errors that aren't tied to a specific IdP.
[ { "sha": "734d1159b61b1e6c8f422cfdf4d29416c18fd6b3", "filename": "awx/api/views.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/4295ab3e4a78357b7c11e9c1a194bff79425aec6/awx%2Fapi%2Fviews.py", "raw_url": "https://github.com/gconsidine/awx/raw/4295ab3e4a78357b7c11e9c1a194bff79425aec6/awx%2Fapi%2Fviews.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fapi%2Fviews.py?ref=4295ab3e4a78357b7c11e9c1a194bff79425aec6", "patch": "@@ -534,7 +534,7 @@ def get(self, request):\n saml_backend_data = dict(backend_data.items())\n saml_backend_data['login_url'] = '%s?idp=%s' % (login_url, idp)\n full_backend_name = '%s:%s' % (name, idp)\n- if err_backend == full_backend_name and err_message:\n+ if (err_backend == full_backend_name or err_backend == name) and err_message:\n saml_backend_data['error'] = err_message\n data[full_backend_name] = saml_backend_data\n else:" } ]
awx
e5278e2291bb0db8b1b911dc298a5a2e125d049c
26adcf5972f877b07b5cc7ba8ee7c27b5517455b
awx/api/serializers.py
https://github.com/gconsidine/awx
true
false
true
@@ -2420,7 +2420,7 @@ class WorkflowJobNodeSerializer(WorkflowNodeBaseSerializer): res['failure_nodes'] = reverse('api:workflow_job_node_failure_nodes_list', args=(obj.pk,)) res['always_nodes'] = reverse('api:workflow_job_node_always_nodes_list', args=(obj.pk,)) if obj.job: - res['job'] = reverse('api:job_detail', args=(obj.job.pk,)) + res['job'] = obj.job.get_absolute_url() if obj.workflow_job: res['workflow_job'] = reverse('api:workflow_job_detail', args=(obj.workflow_job.pk,)) return res
res [ 'job' ] = reverse ( 'api:job_detail' , args = ( obj . job . pk , ) )
res [ 'job' ] = obj . job . get_absolute_url ( )
SINGLE_STMT
[["Move", ["call", 3, 26, 3, 71], ["attribute", 3, 58, 3, 68], 0], ["Insert", ["call", 3, 26, 3, 71], ["argument_list", "N0"], 1], ["Update", ["identifier:pk", 3, 66, 3, 68], "get_absolute_url"], ["Insert", "N0", ["(:(", "T"], 0], ["Move", "N0", ["):)", 3, 69, 3, 70], 1], ["Delete", ["identifier:reverse", 3, 26, 3, 33]], ["Delete", ["(:(", 3, 33, 3, 34]], ["Delete", ["string:'api:job_detail'", 3, 34, 3, 50]], ["Delete", [",:,", 3, 50, 3, 51]], ["Delete", ["identifier:args", 3, 52, 3, 56]], ["Delete", ["=:=", 3, 56, 3, 57]], ["Delete", ["(:(", 3, 57, 3, 58]], ["Delete", [",:,", 3, 68, 3, 69]], ["Delete", ["tuple", 3, 57, 3, 70]], ["Delete", ["keyword_argument", 3, 52, 3, 70]], ["Delete", ["):)", 3, 70, 3, 71]], ["Delete", ["argument_list", 3, 33, 3, 71]]]
gconsidine/awx@e5278e2291bb0db8b1b911dc298a5a2e125d049c
Fix wfj node related destination.
[ { "sha": "b5a5b326efb8f4e644d0166cf3f779f062db5e76", "filename": "awx/api/serializers.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/e5278e2291bb0db8b1b911dc298a5a2e125d049c/awx%2Fapi%2Fserializers.py", "raw_url": "https://github.com/gconsidine/awx/raw/e5278e2291bb0db8b1b911dc298a5a2e125d049c/awx%2Fapi%2Fserializers.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fapi%2Fserializers.py?ref=e5278e2291bb0db8b1b911dc298a5a2e125d049c", "patch": "@@ -2420,7 +2420,7 @@ def get_related(self, obj):\n res['failure_nodes'] = reverse('api:workflow_job_node_failure_nodes_list', args=(obj.pk,))\n res['always_nodes'] = reverse('api:workflow_job_node_always_nodes_list', args=(obj.pk,))\n if obj.job:\n- res['job'] = reverse('api:job_detail', args=(obj.job.pk,))\n+ res['job'] = obj.job.get_absolute_url()\n if obj.workflow_job:\n res['workflow_job'] = reverse('api:workflow_job_detail', args=(obj.workflow_job.pk,))\n return res" } ]
awx
93ce2a5dfd1f9e030d6bfcc1edcec6f04d579213
e00e4f6a10bbd146b90ff7b94e7e693c870ff5a1
awx/settings/defaults.py
https://github.com/gconsidine/awx
true
false
false
@@ -538,7 +538,7 @@ CAPTURE_JOB_EVENT_HOSTS = False # Enable bubblewrap support for running jobs (playbook runs only). # Note: This setting may be overridden by database settings. -AWX_PROOT_ENABLED = False +AWX_PROOT_ENABLED = True # Command/path to bubblewrap. AWX_PROOT_CMD = 'bwrap'
AWX_PROOT_ENABLED = False
AWX_PROOT_ENABLED = True
CHANGE_BOOLEAN_LITERAL
[["Insert", ["assignment", 3, 1, 3, 26], ["true:True", "T"], 2], ["Delete", ["false:False", 3, 21, 3, 26]]]
gconsidine/awx@93ce2a5dfd1f9e030d6bfcc1edcec6f04d579213
Set proot/bubblewrap enabled by default
[ { "sha": "fbb9c8fb736c9f6a5f81d11729368fb004ed7a67", "filename": "awx/settings/defaults.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/93ce2a5dfd1f9e030d6bfcc1edcec6f04d579213/awx%2Fsettings%2Fdefaults.py", "raw_url": "https://github.com/gconsidine/awx/raw/93ce2a5dfd1f9e030d6bfcc1edcec6f04d579213/awx%2Fsettings%2Fdefaults.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fsettings%2Fdefaults.py?ref=93ce2a5dfd1f9e030d6bfcc1edcec6f04d579213", "patch": "@@ -538,7 +538,7 @@ def IS_TESTING(argv=None):\n \n # Enable bubblewrap support for running jobs (playbook runs only).\n # Note: This setting may be overridden by database settings.\n-AWX_PROOT_ENABLED = False\n+AWX_PROOT_ENABLED = True\n \n # Command/path to bubblewrap.\n AWX_PROOT_CMD = 'bwrap'" } ]
awx
ec9065bc2f45d5c0022b6aeffa6a6bc4cb96ac57
f89bd15bf513d0855e5660a69c6c8f8f2fd7f01f
awx/api/serializers.py
https://github.com/gconsidine/awx
true
false
false
@@ -81,7 +81,7 @@ SUMMARIZABLE_FK_FIELDS = { 'credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud'), 'cloud_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud'), 'network_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'net'), - 'job': DEFAULT_SUMMARY_FIELDS + ('status', 'failed', 'started', 'finished'), + 'job': DEFAULT_SUMMARY_FIELDS + ('status', 'failed', 'elapsed'), 'job_template': DEFAULT_SUMMARY_FIELDS, 'schedule': DEFAULT_SUMMARY_FIELDS + ('next_run',), 'unified_job_template': DEFAULT_SUMMARY_FIELDS + ('unified_job_type',),
DEFAULT_SUMMARY_FIELDS + ( 'status' , 'failed' , 'started' , 'finished' ) ,
DEFAULT_SUMMARY_FIELDS + ( 'status' , 'failed' , 'elapsed' ) ,
CHANGE_BINARY_OPERAND
[["Update", ["string:'started'", 3, 58, 3, 67], "'elapsed'"], ["Delete", [",:,", 3, 67, 3, 68]], ["Delete", ["string:'finished'", 3, 69, 3, 79]]]
gconsidine/awx@ec9065bc2f45d5c0022b6aeffa6a6bc4cb96ac57
Added elapsed to the job default summary fields and removed started/finished
[ { "sha": "f6f7a3aae316ebef6bf0c098d662e30e4b3366aa", "filename": "awx/api/serializers.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/ec9065bc2f45d5c0022b6aeffa6a6bc4cb96ac57/awx%2Fapi%2Fserializers.py", "raw_url": "https://github.com/gconsidine/awx/raw/ec9065bc2f45d5c0022b6aeffa6a6bc4cb96ac57/awx%2Fapi%2Fserializers.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fapi%2Fserializers.py?ref=ec9065bc2f45d5c0022b6aeffa6a6bc4cb96ac57", "patch": "@@ -81,7 +81,7 @@\n 'credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud'),\n 'cloud_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud'),\n 'network_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'net'),\n- 'job': DEFAULT_SUMMARY_FIELDS + ('status', 'failed', 'started', 'finished'),\n+ 'job': DEFAULT_SUMMARY_FIELDS + ('status', 'failed', 'elapsed'),\n 'job_template': DEFAULT_SUMMARY_FIELDS,\n 'schedule': DEFAULT_SUMMARY_FIELDS + ('next_run',),\n 'unified_job_template': DEFAULT_SUMMARY_FIELDS + ('unified_job_type',)," } ]
awx
e69f6726d0ed72eaa49cf14751cda6e03d3d4cb8
2ec47737bb0cb1d57c43e39a526b323f4f88ca29
awx/main/tests/unit/test_network_credential.py
https://github.com/gconsidine/awx
true
false
true
@@ -77,7 +77,7 @@ def test_net_cred_ssh_agent(mocker, get_ssh_version): mocker.patch.object(run_job, 'post_run_hook', return_value=None) run_job.run(mock_job.id) - assert run_job.update_model.call_count == 3 + assert run_job.update_model.call_count == 4 job_args = run_job.update_model.call_args_list[1][1].get('job_args') assert 'ssh-add' in job_args
assert run_job . update_model . call_count == 3
assert run_job . update_model . call_count == 4
CHANGE_NUMERIC_LITERAL
[["Update", ["integer:3", 3, 51, 3, 52], "4"]]
gconsidine/awx@e69f6726d0ed72eaa49cf14751cda6e03d3d4cb8
fix unit test
[ { "sha": "7ae97fe76baa772923d29357a717eef99459f51d", "filename": "awx/main/tests/unit/test_network_credential.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/e69f6726d0ed72eaa49cf14751cda6e03d3d4cb8/awx%2Fmain%2Ftests%2Funit%2Ftest_network_credential.py", "raw_url": "https://github.com/gconsidine/awx/raw/e69f6726d0ed72eaa49cf14751cda6e03d3d4cb8/awx%2Fmain%2Ftests%2Funit%2Ftest_network_credential.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Ftests%2Funit%2Ftest_network_credential.py?ref=e69f6726d0ed72eaa49cf14751cda6e03d3d4cb8", "patch": "@@ -77,7 +77,7 @@ def test_net_cred_ssh_agent(mocker, get_ssh_version):\n mocker.patch.object(run_job, 'post_run_hook', return_value=None)\n \n run_job.run(mock_job.id)\n- assert run_job.update_model.call_count == 3\n+ assert run_job.update_model.call_count == 4\n \n job_args = run_job.update_model.call_args_list[1][1].get('job_args')\n assert 'ssh-add' in job_args" } ]
awx
ef1f77bf8e9fe4f72b9ff7e84cf35fce59417aa1
2c737a2238738ecbf297b828d7bf09e3d8434db1
awx/main/tasks.py
https://github.com/gconsidine/awx
true
false
true
@@ -1057,7 +1057,7 @@ class RunJob(BaseTask): project_update_task = local_project_sync._get_task_class() try: project_update_task().run(local_project_sync.id) - job = self.update_model(job.pk, scm_revision=project.scm_revision) + job = self.update_model(job.pk, scm_revision=job.project.scm_revision) except Exception: job = self.update_model(job.pk, status='failed', job_explanation=('Previous Task Failed: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' %
job = self . update_model ( job . pk , scm_revision = project . scm_revision )
job = self . update_model ( job . pk , scm_revision = job . project . scm_revision )
SINGLE_STMT
[["Insert", ["attribute", 3, 62, 3, 82], ["attribute", "N0"], 0], ["Insert", "N0", ["identifier:job", "T"], 0], ["Insert", "N0", [".:.", "T"], 1], ["Move", "N0", ["identifier:project", 3, 62, 3, 69], 2]]
gconsidine/awx@ef1f77bf8e9fe4f72b9ff7e84cf35fce59417aa1
fix what I broke with the job through cancel proj update
[ { "sha": "63128635abda26fb38002c1f5d4cbc6f4688caeb", "filename": "awx/main/tasks.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/ef1f77bf8e9fe4f72b9ff7e84cf35fce59417aa1/awx%2Fmain%2Ftasks.py", "raw_url": "https://github.com/gconsidine/awx/raw/ef1f77bf8e9fe4f72b9ff7e84cf35fce59417aa1/awx%2Fmain%2Ftasks.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Ftasks.py?ref=ef1f77bf8e9fe4f72b9ff7e84cf35fce59417aa1", "patch": "@@ -1057,7 +1057,7 @@ def pre_run_hook(self, job, **kwargs):\n project_update_task = local_project_sync._get_task_class()\n try:\n project_update_task().run(local_project_sync.id)\n- job = self.update_model(job.pk, scm_revision=project.scm_revision)\n+ job = self.update_model(job.pk, scm_revision=job.project.scm_revision)\n except Exception:\n job = self.update_model(job.pk, status='failed',\n job_explanation=('Previous Task Failed: {\"job_type\": \"%s\", \"job_name\": \"%s\", \"job_id\": \"%s\"}' % " } ]
awx
60ea0bdf928208da260216bd80d0b35eaeae29c0
2afdf35375fef91872101aeaf1ddedde9a533199
awx/api/views.py
https://github.com/gconsidine/awx
true
false
true
@@ -603,7 +603,7 @@ class AuthTokenView(APIView): return Response({'token': token.key, 'expires': token.expires}, headers=headers) if 'username' in request.data: logger.warning(smart_text(u"Login failed for user {}".format(request.data['username'])), - user=dict(actor=request.data['username'])) + extra=dict(actor=request.data['username'])) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
logger . warning ( smart_text ( u"Login failed for user {}" . format ( request . data [ 'username' ] ) ) , user = dict ( actor = request . data [ 'username' ] ) )
logger . warning ( smart_text ( u"Login failed for user {}" . format ( request . data [ 'username' ] ) ) , extra = dict ( actor = request . data [ 'username' ] ) )
CHANGE_KEYWORD_ARGUMENT_USED
[["Update", ["identifier:user", 3, 28, 3, 32], "extra"]]
gconsidine/awx@60ea0bdf928208da260216bd80d0b35eaeae29c0
fix bug with failed login log
[ { "sha": "e15084a681a6a3aeecbc0815c07e440b51560c04", "filename": "awx/api/views.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/60ea0bdf928208da260216bd80d0b35eaeae29c0/awx%2Fapi%2Fviews.py", "raw_url": "https://github.com/gconsidine/awx/raw/60ea0bdf928208da260216bd80d0b35eaeae29c0/awx%2Fapi%2Fviews.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fapi%2Fviews.py?ref=60ea0bdf928208da260216bd80d0b35eaeae29c0", "patch": "@@ -603,7 +603,7 @@ def post(self, request):\n return Response({'token': token.key, 'expires': token.expires}, headers=headers)\n if 'username' in request.data:\n logger.warning(smart_text(u\"Login failed for user {}\".format(request.data['username'])),\n- user=dict(actor=request.data['username']))\n+ extra=dict(actor=request.data['username']))\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n \n " } ]
awx
e45201aa8796273a68add08fd6f30b0d85bc7484
9d289e4349b3750d7a323d9b99286c5d5d182d79
awx/api/serializers.py
https://github.com/gconsidine/awx
true
false
false
@@ -76,7 +76,7 @@ SUMMARIZABLE_FK_FIELDS = { 'total_groups', 'groups_with_active_failures', 'has_inventory_sources'), - 'project': DEFAULT_SUMMARY_FIELDS + ('status',), + 'project': DEFAULT_SUMMARY_FIELDS + ('status', 'scm_type'), 'project_update': DEFAULT_SUMMARY_FIELDS + ('status', 'failed',), 'credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud'), 'cloud_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud'),
DEFAULT_SUMMARY_FIELDS + ( 'status' , ) ,
DEFAULT_SUMMARY_FIELDS + ( 'status' , 'scm_type' ) ,
ADD_ELEMENTS_TO_ITERABLE
[["Insert", ["tuple", 3, 41, 3, 52], ["string:'scm_type'", "T"], 3]]
gconsidine/awx@e45201aa8796273a68add08fd6f30b0d85bc7484
Added scm_type to project summary fields
[ { "sha": "eec91af831103679ee82cd55bdf2d937c863d689", "filename": "awx/api/serializers.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/e45201aa8796273a68add08fd6f30b0d85bc7484/awx%2Fapi%2Fserializers.py", "raw_url": "https://github.com/gconsidine/awx/raw/e45201aa8796273a68add08fd6f30b0d85bc7484/awx%2Fapi%2Fserializers.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fapi%2Fserializers.py?ref=e45201aa8796273a68add08fd6f30b0d85bc7484", "patch": "@@ -76,7 +76,7 @@\n 'total_groups',\n 'groups_with_active_failures',\n 'has_inventory_sources'),\n- 'project': DEFAULT_SUMMARY_FIELDS + ('status',),\n+ 'project': DEFAULT_SUMMARY_FIELDS + ('status', 'scm_type'),\n 'project_update': DEFAULT_SUMMARY_FIELDS + ('status', 'failed',),\n 'credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud'),\n 'cloud_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud')," } ]
awx
7acb89ff4a006fa5e15a91b4697d0551c74912aa
93564987d1ecca6e9aa4c2a0a0180b7558115655
awx/main/access.py
https://github.com/gconsidine/awx
true
false
true
@@ -1671,8 +1671,8 @@ class WorkflowJobAccess(BaseAccess): if not node_access.can_add({'reference_obj': node}): wj_add_perm = False if not wj_add_perm and self.save_messages: - self.messages['workflow_job_template'] = ('Template has been modified since job was launched, ' - 'and you do not have permission to its resources.') + self.messages['workflow_job_template'] = _('Template has been modified since job was launched, ' + 'and you do not have permission to its resources.') return wj_add_perm def can_cancel(self, obj):
self . messages [ 'workflow_job_template' ] = ( 'Template has been modified since job was launched, ' 'and you do not have permission to its resources.' )
self . messages [ 'workflow_job_template' ] = _ ( 'Template has been modified since job was launched, ' 'and you do not have permission to its resources.' )
ADD_FUNCTION_AROUND_EXPRESSION
[["Insert", ["assignment", 3, 13, 4, 106], ["call", "N0"], 2], ["Insert", "N0", ["identifier:_", "T"], 0], ["Insert", "N0", ["argument_list", "N1"], 1], ["Move", "N1", ["(:(", 3, 54, 3, 55], 0], ["Move", "N1", ["concatenated_string", 3, 55, 4, 105], 1], ["Move", "N1", ["):)", 4, 105, 4, 106], 2], ["Delete", ["parenthesized_expression", 3, 54, 4, 106]]]
gconsidine/awx@7acb89ff4a006fa5e15a91b4697d0551c74912aa
wrap error message in internationalization marker
[ { "sha": "70ac423098354776577640058e9a8927050f5080", "filename": "awx/main/access.py", "status": "modified", "additions": 2, "deletions": 2, "changes": 4, "blob_url": "https://github.com/gconsidine/awx/blob/7acb89ff4a006fa5e15a91b4697d0551c74912aa/awx%2Fmain%2Faccess.py", "raw_url": "https://github.com/gconsidine/awx/raw/7acb89ff4a006fa5e15a91b4697d0551c74912aa/awx%2Fmain%2Faccess.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Faccess.py?ref=7acb89ff4a006fa5e15a91b4697d0551c74912aa", "patch": "@@ -1671,8 +1671,8 @@ def can_readd(self, obj):\n if not node_access.can_add({'reference_obj': node}):\n wj_add_perm = False\n if not wj_add_perm and self.save_messages:\n- self.messages['workflow_job_template'] = ('Template has been modified since job was launched, '\n- 'and you do not have permission to its resources.')\n+ self.messages['workflow_job_template'] = _('Template has been modified since job was launched, '\n+ 'and you do not have permission to its resources.')\n return wj_add_perm\n \n def can_cancel(self, obj):" } ]
awx
974fe1d2445752b67b3ddd087abd481ba98e58f4
833c61c8cbaedf28196f03a81550ba4091ebb72b
awx/lib/tower_display_callback/module.py
https://github.com/gconsidine/awx
true
false
true
@@ -328,7 +328,7 @@ class BaseCallbackModule(CallbackBase): ok=stats.ok, processed=stats.processed, skipped=stats.skipped, - artifact_data=stats.custom.get('_run', {}) + artifact_data=stats.custom.get('_run', {}) if hasattr(stats, 'custom') else {} ) with self.capture_event_data('playbook_on_stats', **event_data):
processed = stats . processed , skipped = stats . skipped , artifact_data = stats . custom . get ( '_run' , { } )
processed = stats . processed , skipped = stats . skipped , artifact_data = stats . custom . get ( '_run' , { } ) if hasattr ( stats , 'custom' ) else { }
SINGLE_STMT
[["Insert", ["assignment", 2, 21, 3, 55], ["conditional_expression", "N0"], 2], ["Move", "N0", ["call", 3, 27, 3, 55], 0], ["Insert", "N0", ["if:if", "T"], 1], ["Insert", "N0", ["call", "N1"], 2], ["Insert", "N0", ["else:else", "T"], 3], ["Insert", "N0", ["dictionary", "N2"], 4], ["Insert", "N1", ["identifier:hasattr", "T"], 0], ["Insert", "N1", ["argument_list", "N3"], 1], ["Insert", "N2", ["{:{", "T"], 0], ["Insert", "N2", ["}:}", "T"], 1], ["Insert", "N3", ["(:(", "T"], 0], ["Insert", "N3", ["identifier:stats", "T"], 1], ["Insert", "N3", [",:,", "T"], 2], ["Insert", "N3", ["string:'custom'", "T"], 3], ["Insert", "N3", ["):)", "T"], 4]]
gconsidine/awx@974fe1d2445752b67b3ddd087abd481ba98e58f4
Fix an issue with set_stats for unsupported Ansible versions
[ { "sha": "7336da2f088293ec513b4d8dc4ac4839a075b3ee", "filename": "awx/lib/tower_display_callback/module.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/974fe1d2445752b67b3ddd087abd481ba98e58f4/awx%2Flib%2Ftower_display_callback%2Fmodule.py", "raw_url": "https://github.com/gconsidine/awx/raw/974fe1d2445752b67b3ddd087abd481ba98e58f4/awx%2Flib%2Ftower_display_callback%2Fmodule.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Flib%2Ftower_display_callback%2Fmodule.py?ref=974fe1d2445752b67b3ddd087abd481ba98e58f4", "patch": "@@ -328,7 +328,7 @@ def v2_playbook_on_stats(self, stats):\n ok=stats.ok,\n processed=stats.processed,\n skipped=stats.skipped,\n- artifact_data=stats.custom.get('_run', {})\n+ artifact_data=stats.custom.get('_run', {}) if hasattr(stats, 'custom') else {}\n )\n \n with self.capture_event_data('playbook_on_stats', **event_data):" } ]
awx
f2d8800c35fc8ef9b267562af126d2958e224a15
f5d4cbb482b60a705577623daaf46279bbc93ece
awx/main/models/workflow.py
https://github.com/gconsidine/awx
true
false
true
@@ -429,7 +429,7 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl def node_prompts_rejected(self): node_list = [] - for node in self.workflow_job_template_nodes.select_related('unified_job_template').all(): + for node in self.workflow_job_template_nodes.prefetch_related('unified_job_template').all(): node_prompts_warnings = node.get_prompts_warnings() if node_prompts_warnings: node_list.append(node.pk)
for node in self . workflow_job_template_nodes . select_related ( 'unified_job_template' ) . all ( ) : node_prompts_warnings = node . get_prompts_warnings ( ) if node_prompts_warnings : node_list . append ( node . pk )
for node in self . workflow_job_template_nodes . prefetch_related ( 'unified_job_template' ) . all ( ) : node_prompts_warnings = node . get_prompts_warnings ( ) if node_prompts_warnings : node_list . append ( node . pk )
WRONG_FUNCTION_NAME
[["Update", ["identifier:select_related", 3, 54, 3, 68], "prefetch_related"]]
gconsidine/awx@f2d8800c35fc8ef9b267562af126d2958e224a15
Fix bug where type of node UJT was incorrectly evaluated
[ { "sha": "112bdcfc76f018f194a003a9a0d94d67522e3adf", "filename": "awx/main/models/workflow.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/f2d8800c35fc8ef9b267562af126d2958e224a15/awx%2Fmain%2Fmodels%2Fworkflow.py", "raw_url": "https://github.com/gconsidine/awx/raw/f2d8800c35fc8ef9b267562af126d2958e224a15/awx%2Fmain%2Fmodels%2Fworkflow.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Fmodels%2Fworkflow.py?ref=f2d8800c35fc8ef9b267562af126d2958e224a15", "patch": "@@ -429,7 +429,7 @@ def node_templates_missing(self):\n \n def node_prompts_rejected(self):\n node_list = []\n- for node in self.workflow_job_template_nodes.select_related('unified_job_template').all():\n+ for node in self.workflow_job_template_nodes.prefetch_related('unified_job_template').all():\n node_prompts_warnings = node.get_prompts_warnings()\n if node_prompts_warnings:\n node_list.append(node.pk)" } ]
awx
df249eda2eda69b047c462679320db9e6878e749
8f804b9da86abfd1740a20592516fb66e1027ea8
awx/settings/defaults.py
https://github.com/gconsidine/awx
true
false
false
@@ -165,7 +165,7 @@ EVENT_STDOUT_MAX_BYTES_DISPLAY = 1024 JOB_EVENT_WORKERS = 4 -JOB_EVENT_MAX_QUEUE_SIZE = 5000 +JOB_EVENT_MAX_QUEUE_SIZE = 10000 # Disallow sending session cookies over insecure connections SESSION_COOKIE_SECURE = True
JOB_EVENT_MAX_QUEUE_SIZE = 5000
JOB_EVENT_MAX_QUEUE_SIZE = 10000
CHANGE_NUMERIC_LITERAL
[["Update", ["integer:5000", 3, 28, 3, 32], "10000"]]
gconsidine/awx@df249eda2eda69b047c462679320db9e6878e749
Raise default job event worker queue size
[ { "sha": "9f1585c072ba917e52c57184397880772e9fc45a", "filename": "awx/settings/defaults.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/df249eda2eda69b047c462679320db9e6878e749/awx%2Fsettings%2Fdefaults.py", "raw_url": "https://github.com/gconsidine/awx/raw/df249eda2eda69b047c462679320db9e6878e749/awx%2Fsettings%2Fdefaults.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fsettings%2Fdefaults.py?ref=df249eda2eda69b047c462679320db9e6878e749", "patch": "@@ -165,7 +165,7 @@ def IS_TESTING(argv=None):\n \n JOB_EVENT_WORKERS = 4\n \n-JOB_EVENT_MAX_QUEUE_SIZE = 5000\n+JOB_EVENT_MAX_QUEUE_SIZE = 10000\n \n # Disallow sending session cookies over insecure connections\n SESSION_COOKIE_SECURE = True" } ]
awx
df9bb829fcfd1b23a15147c1a8503fb03915389d
fba1fa83d0c33b0d77356049aac641223a327e2a
awx/api/views.py
https://github.com/gconsidine/awx
true
false
true
@@ -2421,7 +2421,7 @@ class JobTemplateSurveySpec(GenericAPIView): raise LicenseForbids(_('Your license does not allow ' 'adding surveys.')) survey_spec = obj.survey_spec - for pos, field in enumerate(survey_spec['spec']): + for pos, field in enumerate(survey_spec.get('spec', [])): if field.get('type') == 'password': if 'default' in field and field['default']: field['default'] = '$encrypted$'
for pos , field in enumerate ( survey_spec [ 'spec' ] ) : if field . get ( 'type' ) == 'password' : if 'default' in field and field [ 'default' ] : field [ 'default' ] = '$encrypted$'
for pos , field in enumerate ( survey_spec . get ( 'spec' , [ ] ) ) : if field . get ( 'type' ) == 'password' : if 'default' in field and field [ 'default' ] : field [ 'default' ] = '$encrypted$'
SINGLE_STMT
[["Insert", ["argument_list", 3, 36, 3, 57], ["call", "N0"], 1], ["Insert", ["argument_list", 3, 36, 3, 57], ["):)", "T"], 2], ["Insert", "N0", ["attribute", "N1"], 0], ["Insert", "N0", ["argument_list", "N2"], 1], ["Move", "N1", ["identifier:survey_spec", 3, 37, 3, 48], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:get", "T"], 2], ["Insert", "N2", ["(:(", "T"], 0], ["Move", "N2", ["string:'spec'", 3, 49, 3, 55], 1], ["Insert", "N2", [",:,", "T"], 2], ["Insert", "N2", ["list", "N3"], 3], ["Move", "N2", ["):)", 3, 56, 3, 57], 4], ["Move", "N3", ["[:[", 3, 48, 3, 49], 0], ["Move", "N3", ["]:]", 3, 55, 3, 56], 1], ["Delete", ["subscript", 3, 37, 3, 56]]]
gconsidine/awx@df9bb829fcfd1b23a15147c1a8503fb03915389d
fix KeyError with spec
[ { "sha": "57ede93ded33c67eaa44ded5d5dbe11453655951", "filename": "awx/api/views.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/df9bb829fcfd1b23a15147c1a8503fb03915389d/awx%2Fapi%2Fviews.py", "raw_url": "https://github.com/gconsidine/awx/raw/df9bb829fcfd1b23a15147c1a8503fb03915389d/awx%2Fapi%2Fviews.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fapi%2Fviews.py?ref=df9bb829fcfd1b23a15147c1a8503fb03915389d", "patch": "@@ -2421,7 +2421,7 @@ def get(self, request, *args, **kwargs):\n raise LicenseForbids(_('Your license does not allow '\n 'adding surveys.'))\n survey_spec = obj.survey_spec\n- for pos, field in enumerate(survey_spec['spec']):\n+ for pos, field in enumerate(survey_spec.get('spec', [])):\n if field.get('type') == 'password':\n if 'default' in field and field['default']:\n field['default'] = '$encrypted$'" } ]
awx
710702dffe879069eb52bb358f355f88f5dc4239
4771b2efc028fbb4ffbf16ab1ef7a42f973ff447
awx/main/migrations/0034_v310_release.py
https://github.com/gconsidine/awx
true
false
false
@@ -62,7 +62,7 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='unifiedjob', name='launch_type', - field=models.CharField(default=b'manual', max_length=20, editable=False, choices=[(b'manual', 'Manual'), (b'relaunch', 'Relaunch'), (b'callback', 'Callback'), (b'scheduled', 'Scheduled'), (b'dependency', 'Dependency'), (b'workflow', 'Workflow')]), + field=models.CharField(default=b'manual', max_length=20, editable=False, choices=[(b'manual', 'Manual'), (b'relaunch', 'Relaunch'), (b'callback', 'Callback'), (b'scheduled', 'Scheduled'), (b'dependency', 'Dependency'), (b'workflow', 'Workflow'), (b'sync', 'Sync')]), ), migrations.CreateModel( name='WorkflowJob',
migrations . AlterField ( model_name = 'unifiedjob' , name = 'launch_type' , field = models . CharField ( default = b'manual' , max_length = 20 , editable = False , choices = [ ( b'manual' , 'Manual' ) , ( b'relaunch' , 'Relaunch' ) , ( b'callback' , 'Callback' ) , ( b'scheduled' , 'Scheduled' ) , ( b'dependency' , 'Dependency' ) , ( b'workflow' , 'Workflow' ) ] ) , ) ,
migrations . AlterField ( model_name = 'unifiedjob' , name = 'launch_type' , field = models . CharField ( default = b'manual' , max_length = 20 , editable = False , choices = [ ( b'manual' , 'Manual' ) , ( b'relaunch' , 'Relaunch' ) , ( b'callback' , 'Callback' ) , ( b'scheduled' , 'Scheduled' ) , ( b'dependency' , 'Dependency' ) , ( b'workflow' , 'Workflow' ) , ( b'sync' , 'Sync' ) ] ) , ) ,
ADD_ELEMENTS_TO_ITERABLE
[["Insert", ["list", 3, 94, 3, 258], [",:,", "T"], 12], ["Insert", ["list", 3, 94, 3, 258], ["tuple", "N0"], 13], ["Insert", "N0", ["(:(", "T"], 0], ["Insert", "N0", ["string:b'sync'", "T"], 1], ["Insert", "N0", [",:,", "T"], 2], ["Insert", "N0", ["string:'Sync'", "T"], 3], ["Insert", "N0", ["):)", "T"], 4]]
gconsidine/awx@710702dffe879069eb52bb358f355f88f5dc4239
include added launch_type choice in 3.1 migration
[ { "sha": "d23843a5fe08a10a146c4dc95f85ddc6aa3b41d9", "filename": "awx/main/migrations/0034_v310_release.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/710702dffe879069eb52bb358f355f88f5dc4239/awx%2Fmain%2Fmigrations%2F0034_v310_release.py", "raw_url": "https://github.com/gconsidine/awx/raw/710702dffe879069eb52bb358f355f88f5dc4239/awx%2Fmain%2Fmigrations%2F0034_v310_release.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Fmigrations%2F0034_v310_release.py?ref=710702dffe879069eb52bb358f355f88f5dc4239", "patch": "@@ -62,7 +62,7 @@ class Migration(migrations.Migration):\n migrations.AlterField(\n model_name='unifiedjob',\n name='launch_type',\n- field=models.CharField(default=b'manual', max_length=20, editable=False, choices=[(b'manual', 'Manual'), (b'relaunch', 'Relaunch'), (b'callback', 'Callback'), (b'scheduled', 'Scheduled'), (b'dependency', 'Dependency'), (b'workflow', 'Workflow')]),\n+ field=models.CharField(default=b'manual', max_length=20, editable=False, choices=[(b'manual', 'Manual'), (b'relaunch', 'Relaunch'), (b'callback', 'Callback'), (b'scheduled', 'Scheduled'), (b'dependency', 'Dependency'), (b'workflow', 'Workflow'), (b'sync', 'Sync')]),\n ),\n migrations.CreateModel(\n name='WorkflowJob'," } ]
awx
f347bc5ae40b4c1448fbb96c7f99e44fda9358b5
a13b2588b238c7f6b35f7b8eefa4721a7183e7dd
awx/sso/fields.py
https://github.com/gconsidine/awx
true
false
true
@@ -390,7 +390,7 @@ class BaseDictWithChildField(fields.DictField): child_field = self.child_fields.get(k, None) if child_field: value[k] = child_field.to_representation(v) - elif allow_unknown_keys: + elif self.allow_unknown_keys: value[k] = v return value
if child_field : value [ k ] = child_field . to_representation ( v ) elif allow_unknown_keys : value [ k ] = v
if child_field : value [ k ] = child_field . to_representation ( v ) elif self . allow_unknown_keys : value [ k ] = v
SINGLE_STMT
[["Insert", ["elif_clause", 3, 13, 4, 29], ["attribute", "N0"], 1], ["Insert", "N0", ["identifier:self", "T"], 0], ["Insert", "N0", [".:.", "T"], 1], ["Move", "N0", ["identifier:allow_unknown_keys", 3, 18, 3, 36], 2]]
gconsidine/awx@f347bc5ae40b4c1448fbb96c7f99e44fda9358b5
fix a typo that results in a NameError Resolves #4861
[ { "sha": "5d95296e8e07d903f82483e0eab6437f95bd8e84", "filename": "awx/sso/fields.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/f347bc5ae40b4c1448fbb96c7f99e44fda9358b5/awx%2Fsso%2Ffields.py", "raw_url": "https://github.com/gconsidine/awx/raw/f347bc5ae40b4c1448fbb96c7f99e44fda9358b5/awx%2Fsso%2Ffields.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fsso%2Ffields.py?ref=f347bc5ae40b4c1448fbb96c7f99e44fda9358b5", "patch": "@@ -390,7 +390,7 @@ def to_representation(self, value):\n child_field = self.child_fields.get(k, None)\n if child_field:\n value[k] = child_field.to_representation(v)\n- elif allow_unknown_keys:\n+ elif self.allow_unknown_keys:\n value[k] = v\n return value\n " } ]
awx
755e408cd9beacef9223123162a58d2fc7fe41a3
5ace8dad4e83aadaeb738ded1a546e4d336d699c
awx/main/models/mixins.py
https://github.com/gconsidine/awx
true
false
true
@@ -130,7 +130,7 @@ class SurveyJobTemplateMixin(models.Model): default = survey_element.get('default') variable_key = survey_element.get('variable') if survey_element.get('type') == 'password': - if variable_key in kwargs_extra_vars: + if variable_key in kwargs_extra_vars and default: kw_value = kwargs_extra_vars[variable_key] if kw_value.startswith('$encrypted$') and kw_value != default: kwargs_extra_vars[variable_key] = default
if variable_key in kwargs_extra_vars : kw_value = kwargs_extra_vars [ variable_key ] if kw_value . startswith ( '$encrypted$' ) and kw_value != default : kwargs_extra_vars [ variable_key ] = default
if variable_key in kwargs_extra_vars and default : kw_value = kwargs_extra_vars [ variable_key ] if kw_value . startswith ( '$encrypted$' ) and kw_value != default : kwargs_extra_vars [ variable_key ] = default
MORE_SPECIFIC_IF
[["Insert", ["if_statement", 3, 21, 6, 70], ["boolean_operator", "N0"], 1], ["Move", "N0", ["comparison_operator", 3, 24, 3, 57], 0], ["Insert", "N0", ["and:and", "T"], 1], ["Insert", "N0", ["identifier:default", "T"], 2]]
gconsidine/awx@755e408cd9beacef9223123162a58d2fc7fe41a3
fix fields with no defaults showing in extra_vars display when not provided
[ { "sha": "952377c112b59102ca63e4b9f5e01db48be0a820", "filename": "awx/main/models/mixins.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/755e408cd9beacef9223123162a58d2fc7fe41a3/awx%2Fmain%2Fmodels%2Fmixins.py", "raw_url": "https://github.com/gconsidine/awx/raw/755e408cd9beacef9223123162a58d2fc7fe41a3/awx%2Fmain%2Fmodels%2Fmixins.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Fmodels%2Fmixins.py?ref=755e408cd9beacef9223123162a58d2fc7fe41a3", "patch": "@@ -130,7 +130,7 @@ def _update_unified_job_kwargs(self, **kwargs):\n default = survey_element.get('default')\n variable_key = survey_element.get('variable')\n if survey_element.get('type') == 'password':\n- if variable_key in kwargs_extra_vars:\n+ if variable_key in kwargs_extra_vars and default:\n kw_value = kwargs_extra_vars[variable_key]\n if kw_value.startswith('$encrypted$') and kw_value != default:\n kwargs_extra_vars[variable_key] = default" } ]
awx
587a843acdd1d3dfecb66a3b18aae3ac435cf85c
81c569e3625d51d97c0b6ebeb13e88beaf00f064
awx/main/tasks.py
https://github.com/gconsidine/awx
true
false
true
@@ -874,7 +874,7 @@ class RunJob(BaseTask): cp_dir = os.path.join(kwargs['private_data_dir'], 'cp') if not os.path.exists(cp_dir): os.mkdir(cp_dir, 0700) - env['ANSIBLE_SSH_CONTROL_PATH'] = os.path.join(cp_dir, 'ansible-ssh-%%h-%%p-%%r') + env['ANSIBLE_SSH_CONTROL_PATH'] = os.path.join(cp_dir, '%%h%%p%%r') # Allow the inventory script to include host variables inline via ['_meta']['hostvars']. env['INVENTORY_HOSTVARS'] = str(True)
env [ 'ANSIBLE_SSH_CONTROL_PATH' ] = os . path . join ( cp_dir , 'ansible-ssh-%%h-%%p-%%r' )
env [ 'ANSIBLE_SSH_CONTROL_PATH' ] = os . path . join ( cp_dir , '%%h%%p%%r' )
CHANGE_STRING_LITERAL
[["Update", ["string:'ansible-ssh-%%h-%%p-%%r'", 3, 64, 3, 89], "'%%h%%p%%r'"]]
gconsidine/awx@587a843acdd1d3dfecb66a3b18aae3ac435cf85c
Shorten the default control persist path. Customer needed a few more characters; this should give a bit more breathing room.
[ { "sha": "72348873e596c6fa0ab364ce58b18b484e504b93", "filename": "awx/main/tasks.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/587a843acdd1d3dfecb66a3b18aae3ac435cf85c/awx%2Fmain%2Ftasks.py", "raw_url": "https://github.com/gconsidine/awx/raw/587a843acdd1d3dfecb66a3b18aae3ac435cf85c/awx%2Fmain%2Ftasks.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Ftasks.py?ref=587a843acdd1d3dfecb66a3b18aae3ac435cf85c", "patch": "@@ -874,7 +874,7 @@ def build_env(self, job, **kwargs):\n cp_dir = os.path.join(kwargs['private_data_dir'], 'cp')\n if not os.path.exists(cp_dir):\n os.mkdir(cp_dir, 0700)\n- env['ANSIBLE_SSH_CONTROL_PATH'] = os.path.join(cp_dir, 'ansible-ssh-%%h-%%p-%%r')\n+ env['ANSIBLE_SSH_CONTROL_PATH'] = os.path.join(cp_dir, '%%h%%p%%r')\n \n # Allow the inventory script to include host variables inline via ['_meta']['hostvars'].\n env['INVENTORY_HOSTVARS'] = str(True)" } ]
awx
6d201c44d97a46ee6627a596c9d6bea0bffa6306
f175fbba233a434a99d1215fb24acad3c655ae17
awx/main/tests/unit/isolated/test_expect.py
https://github.com/gconsidine/awx
true
false
true
@@ -272,7 +272,7 @@ def test_check_isolated_job(private_data_dir, rsa_key): '-e', '{"src": "%s"}' % private_data_dir, '-vvvvv' ], - '/tower_devel/awx/playbooks', mgr.management_env, mock.ANY, + '/awx_devel/awx/playbooks', mgr.management_env, mock.ANY, cancelled_callback=None, idle_timeout=0, job_timeout=0,
tower_devel / awx / playbooks ' , mgr . management_env , mock . ANY ,
awx_devel / awx / playbooks ' , mgr . management_env , mock . ANY ,
CHANGE_BINARY_OPERAND
[["Update", ["identifier:tower_devel", 3, 15, 3, 26], "awx_devel"]]
gconsidine/awx@6d201c44d97a46ee6627a596c9d6bea0bffa6306
fix a busted unit test re: tower -> awx
[ { "sha": "adf43d4bb9e712b3087d0f3b69d5a07b1a9cf096", "filename": "awx/main/tests/unit/isolated/test_expect.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/6d201c44d97a46ee6627a596c9d6bea0bffa6306/awx%2Fmain%2Ftests%2Funit%2Fisolated%2Ftest_expect.py", "raw_url": "https://github.com/gconsidine/awx/raw/6d201c44d97a46ee6627a596c9d6bea0bffa6306/awx%2Fmain%2Ftests%2Funit%2Fisolated%2Ftest_expect.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Ftests%2Funit%2Fisolated%2Ftest_expect.py?ref=6d201c44d97a46ee6627a596c9d6bea0bffa6306", "patch": "@@ -272,7 +272,7 @@ def _synchronize_job_artifacts(args, cwd, env, buff, **kw):\n '-e', '{\"src\": \"%s\"}' % private_data_dir,\n '-vvvvv'\n ],\n- '/tower_devel/awx/playbooks', mgr.management_env, mock.ANY,\n+ '/awx_devel/awx/playbooks', mgr.management_env, mock.ANY,\n cancelled_callback=None,\n idle_timeout=0,\n job_timeout=0," } ]
awx
83dca62e161b18a9ad97ccf9edee9d02985e41ca
39eab0271317e80b27e6e967225c97b948f30a20
awx/api/serializers.py
https://github.com/gconsidine/awx
true
false
false
@@ -91,7 +91,7 @@ SUMMARIZABLE_FK_FIELDS = { 'project_update': DEFAULT_SUMMARY_FIELDS + ('status', 'failed',), 'credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud'), 'vault_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud'), - 'job': DEFAULT_SUMMARY_FIELDS + ('status', 'failed', 'elapsed'), + 'job': DEFAULT_SUMMARY_FIELDS + ('status', 'failed', 'elapsed', 'job_type'), 'job_template': DEFAULT_SUMMARY_FIELDS, 'workflow_job_template': DEFAULT_SUMMARY_FIELDS, 'workflow_job': DEFAULT_SUMMARY_FIELDS,
DEFAULT_SUMMARY_FIELDS + ( 'status' , 'failed' , 'elapsed' ) ,
DEFAULT_SUMMARY_FIELDS + ( 'status' , 'failed' , 'elapsed' , 'job_type' ) ,
ADD_ELEMENTS_TO_ITERABLE
[["Insert", ["tuple", 3, 37, 3, 68], [",:,", "T"], 6], ["Insert", ["tuple", 3, 37, 3, 68], ["string:'job_type'", "T"], 7]]
gconsidine/awx@83dca62e161b18a9ad97ccf9edee9d02985e41ca
add job_type to job summary fields
[ { "sha": "dd94ae4e2650716ac69e129e8c341832fd120025", "filename": "awx/api/serializers.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/83dca62e161b18a9ad97ccf9edee9d02985e41ca/awx%2Fapi%2Fserializers.py", "raw_url": "https://github.com/gconsidine/awx/raw/83dca62e161b18a9ad97ccf9edee9d02985e41ca/awx%2Fapi%2Fserializers.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fapi%2Fserializers.py?ref=83dca62e161b18a9ad97ccf9edee9d02985e41ca", "patch": "@@ -91,7 +91,7 @@\n 'project_update': DEFAULT_SUMMARY_FIELDS + ('status', 'failed',),\n 'credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud'),\n 'vault_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud'),\n- 'job': DEFAULT_SUMMARY_FIELDS + ('status', 'failed', 'elapsed'),\n+ 'job': DEFAULT_SUMMARY_FIELDS + ('status', 'failed', 'elapsed', 'job_type'),\n 'job_template': DEFAULT_SUMMARY_FIELDS,\n 'workflow_job_template': DEFAULT_SUMMARY_FIELDS,\n 'workflow_job': DEFAULT_SUMMARY_FIELDS," } ]
awx
24d173c74f09c728f4c67782a18b402ea60fb860
53581b1c5b6a2c7ed83d6c39a165cbfb1e3fd534
awx/api/serializers.py
https://github.com/gconsidine/awx
true
false
false
@@ -91,7 +91,7 @@ SUMMARIZABLE_FK_FIELDS = { 'project_update': DEFAULT_SUMMARY_FIELDS + ('status', 'failed',), 'credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud'), 'vault_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud'), - 'job': DEFAULT_SUMMARY_FIELDS + ('status', 'failed', 'elapsed', 'job_type'), + 'job': DEFAULT_SUMMARY_FIELDS + ('status', 'failed', 'elapsed'), 'job_template': DEFAULT_SUMMARY_FIELDS, 'workflow_job_template': DEFAULT_SUMMARY_FIELDS, 'workflow_job': DEFAULT_SUMMARY_FIELDS,
DEFAULT_SUMMARY_FIELDS + ( 'status' , 'failed' , 'elapsed' , 'job_type' ) ,
DEFAULT_SUMMARY_FIELDS + ( 'status' , 'failed' , 'elapsed' ) ,
CHANGE_BINARY_OPERAND
[["Delete", [",:,", 3, 67, 3, 68]], ["Delete", ["string:'job_type'", 3, 69, 3, 79]]]
gconsidine/awx@24d173c74f09c728f4c67782a18b402ea60fb860
Revert "add job_type to job summary fields"
[ { "sha": "b6f6e86230c729663efcd7cd3486bd1af858e9c1", "filename": "awx/api/serializers.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/24d173c74f09c728f4c67782a18b402ea60fb860/awx%2Fapi%2Fserializers.py", "raw_url": "https://github.com/gconsidine/awx/raw/24d173c74f09c728f4c67782a18b402ea60fb860/awx%2Fapi%2Fserializers.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fapi%2Fserializers.py?ref=24d173c74f09c728f4c67782a18b402ea60fb860", "patch": "@@ -91,7 +91,7 @@\n 'project_update': DEFAULT_SUMMARY_FIELDS + ('status', 'failed',),\n 'credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud'),\n 'vault_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud'),\n- 'job': DEFAULT_SUMMARY_FIELDS + ('status', 'failed', 'elapsed', 'job_type'),\n+ 'job': DEFAULT_SUMMARY_FIELDS + ('status', 'failed', 'elapsed'),\n 'job_template': DEFAULT_SUMMARY_FIELDS,\n 'workflow_job_template': DEFAULT_SUMMARY_FIELDS,\n 'workflow_job': DEFAULT_SUMMARY_FIELDS," } ]
awx
f13c56e3d96697b9b521d896089fba94679dd8aa
af3652bb2fa94394e70a5f6663e2954c8fcb4092
setup.py
https://github.com/gconsidine/awx
true
false
false
@@ -16,7 +16,7 @@ etcpath = "/etc/tower" homedir = "/var/lib/awx" bindir = "/usr/bin" sharedir = "/usr/share/awx" -docdir = "/usr/share/doc/ansible-awx" +docdir = "/usr/share/doc/awx" def get_version():
docdir = "/usr/share/doc/ansible-awx"
docdir = "/usr/share/doc/awx"
CHANGE_STRING_LITERAL
[["Update", ["string:\"/usr/share/doc/ansible-awx\"", 3, 10, 3, 38], "\"/usr/share/doc/awx\""]]
gconsidine/awx@f13c56e3d96697b9b521d896089fba94679dd8aa
Fix docdir location
[ { "sha": "154caffca574e56b9f82e732340c1cc45af6e523", "filename": "setup.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/f13c56e3d96697b9b521d896089fba94679dd8aa/setup.py", "raw_url": "https://github.com/gconsidine/awx/raw/f13c56e3d96697b9b521d896089fba94679dd8aa/setup.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/setup.py?ref=f13c56e3d96697b9b521d896089fba94679dd8aa", "patch": "@@ -16,7 +16,7 @@\n homedir = \"/var/lib/awx\"\n bindir = \"/usr/bin\"\n sharedir = \"/usr/share/awx\"\n-docdir = \"/usr/share/doc/ansible-awx\"\n+docdir = \"/usr/share/doc/awx\"\n \n \n def get_version():" } ]
awx
4b830e117612d6d73192e531caa7de8e138b009e
144e61c874b51934192b4fcba7f2b20df3dffdcb
awx/main/tasks.py
https://github.com/gconsidine/awx
true
false
true
@@ -992,7 +992,7 @@ class RunJob(BaseTask): env['ANSIBLE_LIBRARY'] = self.get_path_to('..', 'plugins', 'library') env['ANSIBLE_CACHE_PLUGINS'] = self.get_path_to('..', 'plugins', 'fact_caching') env['ANSIBLE_CACHE_PLUGIN'] = "awx" - env['ANSIBLE_FACT_CACHE_TIMEOUT'] = str(settings.ANSIBLE_FACT_CACHE_TIMEOUT) + env['ANSIBLE_CACHE_PLUGIN_TIMEOUT'] = str(settings.ANSIBLE_FACT_CACHE_TIMEOUT) env['ANSIBLE_CACHE_PLUGIN_CONNECTION'] = settings.CACHES['default']['LOCATION'] if 'LOCATION' in settings.CACHES['default'] else '' if job.project: env['PROJECT_REVISION'] = job.project.scm_revision
env [ 'ANSIBLE_FACT_CACHE_TIMEOUT' ] = str ( settings . ANSIBLE_FACT_CACHE_TIMEOUT )
env [ 'ANSIBLE_CACHE_PLUGIN_TIMEOUT' ] = str ( settings . ANSIBLE_FACT_CACHE_TIMEOUT )
CHANGE_STRING_LITERAL
[["Update", ["string:'ANSIBLE_FACT_CACHE_TIMEOUT'", 3, 17, 3, 45], "'ANSIBLE_CACHE_PLUGIN_TIMEOUT'"]]
gconsidine/awx@4b830e117612d6d73192e531caa7de8e138b009e
fix fact cache timeout env var key name
[ { "sha": "ec2ef9e5d7e2e692e5ec11ae3180fcbd32dc0af0", "filename": "awx/main/tasks.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/4b830e117612d6d73192e531caa7de8e138b009e/awx%2Fmain%2Ftasks.py", "raw_url": "https://github.com/gconsidine/awx/raw/4b830e117612d6d73192e531caa7de8e138b009e/awx%2Fmain%2Ftasks.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Ftasks.py?ref=4b830e117612d6d73192e531caa7de8e138b009e", "patch": "@@ -992,7 +992,7 @@ def build_env(self, job, **kwargs):\n env['ANSIBLE_LIBRARY'] = self.get_path_to('..', 'plugins', 'library')\n env['ANSIBLE_CACHE_PLUGINS'] = self.get_path_to('..', 'plugins', 'fact_caching')\n env['ANSIBLE_CACHE_PLUGIN'] = \"awx\"\n- env['ANSIBLE_FACT_CACHE_TIMEOUT'] = str(settings.ANSIBLE_FACT_CACHE_TIMEOUT)\n+ env['ANSIBLE_CACHE_PLUGIN_TIMEOUT'] = str(settings.ANSIBLE_FACT_CACHE_TIMEOUT)\n env['ANSIBLE_CACHE_PLUGIN_CONNECTION'] = settings.CACHES['default']['LOCATION'] if 'LOCATION' in settings.CACHES['default'] else ''\n if job.project:\n env['PROJECT_REVISION'] = job.project.scm_revision" } ]
awx
61aa7c1ad7f15e0fc2413b685d56f0b24bf59568
2eaa00b6333e2a4cb0c89fa7a5d03238b9ebd8e5
awx/main/models/unified_jobs.py
https://github.com/gconsidine/awx
true
false
true
@@ -806,7 +806,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique try: return os.stat(self.result_stdout_file).st_size except: - return 0 + return len(self.result_stdout) def _result_stdout_raw_limited(self, start_line=0, end_line=None, redact_sensitive=True, escape_ascii=False): return_buffer = u""
return 0
return len ( self . result_stdout )
SINGLE_STMT
[["Insert", ["return_statement", 3, 13, 3, 21], ["call", "N0"], 1], ["Insert", "N0", ["identifier:len", "T"], 0], ["Insert", "N0", ["argument_list", "N1"], 1], ["Insert", "N1", ["(:(", "T"], 0], ["Insert", "N1", ["attribute", "N2"], 1], ["Insert", "N1", ["):)", "T"], 2], ["Insert", "N2", ["identifier:self", "T"], 0], ["Insert", "N2", [".:.", "T"], 1], ["Insert", "N2", ["identifier:result_stdout", "T"], 2], ["Delete", ["integer:0", 3, 20, 3, 21]]]
gconsidine/awx@61aa7c1ad7f15e0fc2413b685d56f0b24bf59568
return proper stdout size for DB-backed stdout (like system jobs) see: https://github.com/ansible/ansible-tower/issues/7398
[ { "sha": "055ab4c9c11305b44d00bc34620f62851342b9d0", "filename": "awx/main/models/unified_jobs.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/61aa7c1ad7f15e0fc2413b685d56f0b24bf59568/awx%2Fmain%2Fmodels%2Funified_jobs.py", "raw_url": "https://github.com/gconsidine/awx/raw/61aa7c1ad7f15e0fc2413b685d56f0b24bf59568/awx%2Fmain%2Fmodels%2Funified_jobs.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Fmodels%2Funified_jobs.py?ref=61aa7c1ad7f15e0fc2413b685d56f0b24bf59568", "patch": "@@ -806,7 +806,7 @@ def result_stdout_size(self):\n try:\n return os.stat(self.result_stdout_file).st_size\n except:\n- return 0\n+ return len(self.result_stdout)\n \n def _result_stdout_raw_limited(self, start_line=0, end_line=None, redact_sensitive=True, escape_ascii=False):\n return_buffer = u\"\"" } ]
awx
4e2f075fa6dad22dccd35e97a3b697a451527c55
6a862269b7c0e844b9cb847382a63caf6cca9644
awx/main/models/inventory.py
https://github.com/gconsidine/awx
true
false
true
@@ -1400,7 +1400,7 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions): Q(inventory=self.inventory, update_on_project_update=True, source='scm') & ~Q(id=self.id)).exists(): - raise ValidationError(_("More than one SCM-based inventory source with update on project update on per-inventory not allowed.")) + raise ValidationError(_("More than one SCM-based inventory source with update on project update per-inventory not allowed.")) return self.update_on_project_update def clean_update_on_launch(self):
Q ( inventory = self . inventory , update_on_project_update = True , source = 'scm' ) & ~ Q ( id = self . id ) ) . exists ( ) : raise ValidationError ( _ ( "More than one SCM-based inventory source with update on project update on per-inventory not allowed." ) )
Q ( inventory = self . inventory , update_on_project_update = True , source = 'scm' ) & ~ Q ( id = self . id ) ) . exists ( ) : raise ValidationError ( _ ( "More than one SCM-based inventory source with update on project update per-inventory not allowed." ) )
CHANGE_STRING_LITERAL
[["Update", ["string:\"More than one SCM-based inventory source with update on project update on per-inventory not allowed.\"", 3, 37, 3, 139], "\"More than one SCM-based inventory source with update on project update per-inventory not allowed.\""]]
gconsidine/awx@4e2f075fa6dad22dccd35e97a3b697a451527c55
better scm inv restriction error message
[ { "sha": "577d8615a109ec2b8cf1af341966b61ffaa4222d", "filename": "awx/main/models/inventory.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/4e2f075fa6dad22dccd35e97a3b697a451527c55/awx%2Fmain%2Fmodels%2Finventory.py", "raw_url": "https://github.com/gconsidine/awx/raw/4e2f075fa6dad22dccd35e97a3b697a451527c55/awx%2Fmain%2Fmodels%2Finventory.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Fmodels%2Finventory.py?ref=4e2f075fa6dad22dccd35e97a3b697a451527c55", "patch": "@@ -1400,7 +1400,7 @@ def clean_update_on_project_update(self):\n Q(inventory=self.inventory,\n update_on_project_update=True, source='scm') & \n ~Q(id=self.id)).exists():\n- raise ValidationError(_(\"More than one SCM-based inventory source with update on project update on per-inventory not allowed.\"))\n+ raise ValidationError(_(\"More than one SCM-based inventory source with update on project update per-inventory not allowed.\"))\n return self.update_on_project_update\n \n def clean_update_on_launch(self):" } ]
awx
5f4d033836e4ac82f8a959ac9e33fe2e1ad3b00c
9bb6ec28fe136004f44559c7d53aab591963c15e
awx/main/tests/functional/api/test_inventory.py
https://github.com/gconsidine/awx
true
false
true
@@ -405,7 +405,7 @@ class TestControlledBySCM: admin_user, expect=400) content = json.loads(res.content) assert content['update_on_project_update'] == ["More than one SCM-based inventory source with update on project update " - "on per-inventory not allowed."] + "per-inventory not allowed."] def test_adding_inv_src_without_proj_access_prohibited(self, post, project, inventory, rando): inventory.admin_role.members.add(rando)
assert content [ 'update_on_project_update' ] == [ "More than one SCM-based inventory source with update on project update " "on per-inventory not allowed." ]
assert content [ 'update_on_project_update' ] == [ "More than one SCM-based inventory source with update on project update " "per-inventory not allowed." ]
CHANGE_STRING_LITERAL
[["Update", ["string:\"on per-inventory not allowed.\"", 3, 56, 3, 87], "\"per-inventory not allowed.\""]]
gconsidine/awx@5f4d033836e4ac82f8a959ac9e33fe2e1ad3b00c
change test to match with recent error message change
[ { "sha": "8f6f6a6f222704074fa1d20eb9adb5f22270b5ad", "filename": "awx/main/tests/functional/api/test_inventory.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/5f4d033836e4ac82f8a959ac9e33fe2e1ad3b00c/awx%2Fmain%2Ftests%2Ffunctional%2Fapi%2Ftest_inventory.py", "raw_url": "https://github.com/gconsidine/awx/raw/5f4d033836e4ac82f8a959ac9e33fe2e1ad3b00c/awx%2Fmain%2Ftests%2Ffunctional%2Fapi%2Ftest_inventory.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Ftests%2Ffunctional%2Fapi%2Ftest_inventory.py?ref=5f4d033836e4ac82f8a959ac9e33fe2e1ad3b00c", "patch": "@@ -405,7 +405,7 @@ def test_two_update_on_project_update_inv_src_prohibited(self, patch, scm_invent\n admin_user, expect=400)\n content = json.loads(res.content)\n assert content['update_on_project_update'] == [\"More than one SCM-based inventory source with update on project update \"\n- \"on per-inventory not allowed.\"]\n+ \"per-inventory not allowed.\"]\n \n def test_adding_inv_src_without_proj_access_prohibited(self, post, project, inventory, rando):\n inventory.admin_role.members.add(rando)" } ]
awx
e23539e69b3f01288e439c683d4e9c528a5dd482
8eb69b96d1e720fa2c5f42626080665e1608fd9e
awx/main/management/commands/create_default_org.py
https://github.com/gconsidine/awx
true
false
true
@@ -23,5 +23,5 @@ class Command(BaseCommand): except IndexError: superuser = None with impersonate(superuser): - org = Organization.objects.create(name='Default') + Organization.objects.create(name='Default') print('Default organization added.')
org = Organization . objects . create ( name = 'Default' )
Organization . objects . create ( name = 'Default' )
SINGLE_STMT
[["Move", ["expression_statement", 3, 13, 3, 62], ["call", 3, 19, 3, 62], 0], ["Delete", ["identifier:org", 3, 13, 3, 16]], ["Delete", ["=:=", 3, 17, 3, 18]], ["Delete", ["assignment", 3, 13, 3, 62]]]
gconsidine/awx@e23539e69b3f01288e439c683d4e9c528a5dd482
Fix flake8 warning.
[ { "sha": "0fa57424f4edaaae075594fe02b9f635c1c8a031", "filename": "awx/main/management/commands/create_default_org.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/e23539e69b3f01288e439c683d4e9c528a5dd482/awx%2Fmain%2Fmanagement%2Fcommands%2Fcreate_default_org.py", "raw_url": "https://github.com/gconsidine/awx/raw/e23539e69b3f01288e439c683d4e9c528a5dd482/awx%2Fmain%2Fmanagement%2Fcommands%2Fcreate_default_org.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Fmanagement%2Fcommands%2Fcreate_default_org.py?ref=e23539e69b3f01288e439c683d4e9c528a5dd482", "patch": "@@ -23,5 +23,5 @@ def handle(self, *args, **kwargs):\n except IndexError:\n superuser = None\n with impersonate(superuser):\n- org = Organization.objects.create(name='Default')\n+ Organization.objects.create(name='Default')\n print('Default organization added.')" } ]
awx
c6c80447a643f48fbdb1afbe0ff593d4368db7dd
419bc3dd1c74b42ee4b5e29789a93975621575d1
setup.py
https://github.com/gconsidine/awx
true
false
false
@@ -129,7 +129,7 @@ setup( "tools/munin_monitors/redis_alive", "tools/munin_monitors/socketio_alive", "tools/munin_monitors/taskmanager_alive", - "tools/munin_monitors/mongo_cocnn", + "tools/munin_monitors/mongo_conn", "tools/munin_monitors/mongo_docs", "tools/munin_monitors/mongo_mem", "tools/munin_monitors/mongo_ops"]),
"tools/munin_monitors/mongo_cocnn" ,
"tools/munin_monitors/mongo_conn" ,
CHANGE_STRING_LITERAL
[["Update", ["string:\"tools/munin_monitors/mongo_cocnn\"", 3, 37, 3, 71], "\"tools/munin_monitors/mongo_conn\""]]
gconsidine/awx@c6c80447a643f48fbdb1afbe0ff593d4368db7dd
Fix a spelling error in the setup script that's causing rpm build failures
[ { "sha": "9b697653faffcd4a5c89593d96b76cc614fd2f8e", "filename": "setup.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/c6c80447a643f48fbdb1afbe0ff593d4368db7dd/setup.py", "raw_url": "https://github.com/gconsidine/awx/raw/c6c80447a643f48fbdb1afbe0ff593d4368db7dd/setup.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/setup.py?ref=c6c80447a643f48fbdb1afbe0ff593d4368db7dd", "patch": "@@ -129,7 +129,7 @@ def proc_data_files(data_files):\n \"tools/munin_monitors/redis_alive\",\n \"tools/munin_monitors/socketio_alive\",\n \"tools/munin_monitors/taskmanager_alive\",\n- \"tools/munin_monitors/mongo_cocnn\",\n+ \"tools/munin_monitors/mongo_conn\",\n \"tools/munin_monitors/mongo_docs\",\n \"tools/munin_monitors/mongo_mem\",\n \"tools/munin_monitors/mongo_ops\"])," } ]
awx
81611b44f66bbffaf4f060f78c0d517063b7035c
51bb85c3e21a20765c1c86d12c2ca920a6160e71
awx/api/views.py
https://github.com/gconsidine/awx
true
false
false
@@ -568,7 +568,7 @@ class InstanceGroupUnifiedJobsList(SubListAPIView): model = UnifiedJob serializer_class = UnifiedJobSerializer parent_model = InstanceGroup - relationship = "instance_group" + relationship = "unifiedjob_set" new_in_320 = True
relationship = "instance_group"
relationship = "unifiedjob_set"
CHANGE_STRING_LITERAL
[["Update", ["string:\"instance_group\"", 3, 20, 3, 36], "\"unifiedjob_set\""]]
gconsidine/awx@81611b44f66bbffaf4f060f78c0d517063b7035c
Fix an incorrect reference on instance group jobs list
[ { "sha": "78656619238b982f9496f073dea11eda8c419f4b", "filename": "awx/api/views.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/81611b44f66bbffaf4f060f78c0d517063b7035c/awx%2Fapi%2Fviews.py", "raw_url": "https://github.com/gconsidine/awx/raw/81611b44f66bbffaf4f060f78c0d517063b7035c/awx%2Fapi%2Fviews.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fapi%2Fviews.py?ref=81611b44f66bbffaf4f060f78c0d517063b7035c", "patch": "@@ -568,7 +568,7 @@ class InstanceGroupUnifiedJobsList(SubListAPIView):\n model = UnifiedJob\n serializer_class = UnifiedJobSerializer\n parent_model = InstanceGroup\n- relationship = \"instance_group\"\n+ relationship = \"unifiedjob_set\"\n new_in_320 = True\n \n " } ]
awx
1ba71acf07e3b657b0f9d217b510213f74862b68
754f698b9b9ddac0d9c4a0021fe3128ac8cb4c9b
awx/main/access.py
https://github.com/gconsidine/awx
true
false
true
@@ -1307,7 +1307,7 @@ class JobTemplateAccess(BaseAccess): def can_unattach(self, obj, sub_obj, relationship, *args, **kwargs): if relationship == "instance_groups": return self.can_attach(obj, sub_obj, relationship, *args, **kwargs) - return super(InventoryAccess, self).can_attach(obj, sub_obj, relationship, *args, **kwargs) + return super(JobTemplateAccess, self).can_attach(obj, sub_obj, relationship, *args, **kwargs)
return super ( InventoryAccess , self ) . can_attach ( obj , sub_obj , relationship , * args , ** kwargs )
return super ( JobTemplateAccess , self ) . can_attach ( obj , sub_obj , relationship , * args , ** kwargs )
CHANGE_IDENTIFIER_USED
[["Update", ["identifier:InventoryAccess", 3, 22, 3, 37], "JobTemplateAccess"]]
gconsidine/awx@1ba71acf07e3b657b0f9d217b510213f74862b68
fix a typo causing tests to fail
[ { "sha": "01165191f0d639c5ca9435d7849f4fd09e3fb5d4", "filename": "awx/main/access.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/1ba71acf07e3b657b0f9d217b510213f74862b68/awx%2Fmain%2Faccess.py", "raw_url": "https://github.com/gconsidine/awx/raw/1ba71acf07e3b657b0f9d217b510213f74862b68/awx%2Fmain%2Faccess.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Faccess.py?ref=1ba71acf07e3b657b0f9d217b510213f74862b68", "patch": "@@ -1307,7 +1307,7 @@ def can_attach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=F\n def can_unattach(self, obj, sub_obj, relationship, *args, **kwargs):\n if relationship == \"instance_groups\":\n return self.can_attach(obj, sub_obj, relationship, *args, **kwargs)\n- return super(InventoryAccess, self).can_attach(obj, sub_obj, relationship, *args, **kwargs)\n+ return super(JobTemplateAccess, self).can_attach(obj, sub_obj, relationship, *args, **kwargs)\n \n \n " } ]
awx
901c77bcfbefe23b69a3035feced5759ad5cf92b
fc6630fd25f40c83e86fb1e96d65b5ae77702632
awx/main/scheduler/__init__.py
https://github.com/gconsidine/awx
true
false
true
@@ -239,7 +239,7 @@ class TaskManager(): return project_task def create_inventory_update(self, task, inventory_source_task): - inventory_task = InventorySource.objects.get(id=inventory_source_task.id).create_inventory_update(launch_type='inventory_taskendency') + inventory_task = InventorySource.objects.get(id=inventory_source_task.id).create_inventory_update(launch_type='dependency') inventory_task.created = task.created - timedelta(seconds=2) inventory_task.status = 'pending'
inventory_task = InventorySource . objects . get ( id = inventory_source_task . id ) . create_inventory_update ( launch_type = 'inventory_taskendency' )
inventory_task = InventorySource . objects . get ( id = inventory_source_task . id ) . create_inventory_update ( launch_type = 'dependency' )
CHANGE_STRING_LITERAL
[["Update", ["string:'inventory_taskendency'", 3, 119, 3, 142], "'dependency'"]]
gconsidine/awx@901c77bcfbefe23b69a3035feced5759ad5cf92b
dependent IU launch_type reduced to 'dependency'
[ { "sha": "b34607bf3e5bce7d657ffe7ec157825ce1f68331", "filename": "awx/main/scheduler/__init__.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/901c77bcfbefe23b69a3035feced5759ad5cf92b/awx%2Fmain%2Fscheduler%2F__init__.py", "raw_url": "https://github.com/gconsidine/awx/raw/901c77bcfbefe23b69a3035feced5759ad5cf92b/awx%2Fmain%2Fscheduler%2F__init__.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Fscheduler%2F__init__.py?ref=901c77bcfbefe23b69a3035feced5759ad5cf92b", "patch": "@@ -239,7 +239,7 @@ def create_project_update(self, task):\n return project_task\n \n def create_inventory_update(self, task, inventory_source_task):\n- inventory_task = InventorySource.objects.get(id=inventory_source_task.id).create_inventory_update(launch_type='inventory_taskendency')\n+ inventory_task = InventorySource.objects.get(id=inventory_source_task.id).create_inventory_update(launch_type='dependency')\n \n inventory_task.created = task.created - timedelta(seconds=2)\n inventory_task.status = 'pending'" } ]
awx
309452775952a23e4e0defb33ee1d5bfd2171bb7
518baee5ba4848861d9b4dd79b81ae6cbe6de678
awx/sso/backends.py
https://github.com/gconsidine/awx
true
false
true
@@ -194,7 +194,7 @@ class TACACSPlusBackend(object): timeout=django_settings.TACACSPLUS_SESSION_TIMEOUT, ).authenticate( username.encode('utf-8'), password.encode('utf-8'), - tacacs_plus.TAC_PLUS_AUTHEN_TYPES[django_settings.TACACSPLUS_AUTH_PROTOCOL], + authen_type=tacacs_plus.TAC_PLUS_AUTHEN_TYPES[django_settings.TACACSPLUS_AUTH_PROTOCOL], ) except Exception as e: logger.exception("TACACS+ Authentication Error: %s" % (e.message,))
authenticate ( username . encode ( 'utf-8' ) , password . encode ( 'utf-8' ) , tacacs_plus . TAC_PLUS_AUTHEN_TYPES [ django_settings . TACACSPLUS_AUTH_PROTOCOL ] , )
authenticate ( username . encode ( 'utf-8' ) , password . encode ( 'utf-8' ) , authen_type = tacacs_plus . TAC_PLUS_AUTHEN_TYPES [ django_settings . TACACSPLUS_AUTH_PROTOCOL ] , )
SINGLE_STMT
[["Insert", ["argument_list", 1, 27, 4, 14], ["keyword_argument", "N0"], 5], ["Insert", "N0", ["identifier:authen_type", "T"], 0], ["Insert", "N0", ["=:=", "T"], 1], ["Move", "N0", ["subscript", 3, 17, 3, 92], 2]]
gconsidine/awx@309452775952a23e4e0defb33ee1d5bfd2171bb7
Fix usage of tacacs API
[ { "sha": "1dd42fcfd9e0621cbfcc4727a739902a3ff20cfd", "filename": "awx/sso/backends.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/309452775952a23e4e0defb33ee1d5bfd2171bb7/awx%2Fsso%2Fbackends.py", "raw_url": "https://github.com/gconsidine/awx/raw/309452775952a23e4e0defb33ee1d5bfd2171bb7/awx%2Fsso%2Fbackends.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fsso%2Fbackends.py?ref=309452775952a23e4e0defb33ee1d5bfd2171bb7", "patch": "@@ -194,7 +194,7 @@ def authenticate(self, username, password):\n timeout=django_settings.TACACSPLUS_SESSION_TIMEOUT,\n ).authenticate(\n username.encode('utf-8'), password.encode('utf-8'),\n- tacacs_plus.TAC_PLUS_AUTHEN_TYPES[django_settings.TACACSPLUS_AUTH_PROTOCOL],\n+ authen_type=tacacs_plus.TAC_PLUS_AUTHEN_TYPES[django_settings.TACACSPLUS_AUTH_PROTOCOL],\n )\n except Exception as e:\n logger.exception(\"TACACS+ Authentication Error: %s\" % (e.message,))" } ]
awx
9445fed340912c96c6d1035c7b3b23d0fc4578cc
b1d4c44c511f9b2a05b4345970c84c05cc4a1878
awx/main/validators.py
https://github.com/gconsidine/awx
true
false
true
@@ -57,7 +57,7 @@ def validate_pem(data, min_keys=0, max_keys=None, min_certs=0, max_certs=None): while data: match = pem_obj_re.match(data) if not match: - raise ValidationError(_('Invalid certificate or key: %r...') % data[:100]) + raise ValidationError(_('Invalid certificate or key: %s...') % data[:100]) data = match.group(4).lstrip() # Check PEM object type, check key type if private key.
raise ValidationError ( _ ( 'Invalid certificate or key: %r...' ) % data [ : 100 ] )
raise ValidationError ( _ ( 'Invalid certificate or key: %s...' ) % data [ : 100 ] )
CHANGE_STRING_LITERAL
[["Update", ["string:'Invalid certificate or key: %r...'", 3, 37, 3, 72], "'Invalid certificate or key: %s...'"]]
gconsidine/awx@9445fed340912c96c6d1035c7b3b23d0fc4578cc
use str() - not repr() - for invalid SSH cert error messages see: #6417
[ { "sha": "2acd970c158630ca8c53754081d2e6eca12c01c6", "filename": "awx/main/validators.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/9445fed340912c96c6d1035c7b3b23d0fc4578cc/awx%2Fmain%2Fvalidators.py", "raw_url": "https://github.com/gconsidine/awx/raw/9445fed340912c96c6d1035c7b3b23d0fc4578cc/awx%2Fmain%2Fvalidators.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Fvalidators.py?ref=9445fed340912c96c6d1035c7b3b23d0fc4578cc", "patch": "@@ -57,7 +57,7 @@ def validate_pem(data, min_keys=0, max_keys=None, min_certs=0, max_certs=None):\n while data:\n match = pem_obj_re.match(data)\n if not match:\n- raise ValidationError(_('Invalid certificate or key: %r...') % data[:100])\n+ raise ValidationError(_('Invalid certificate or key: %s...') % data[:100])\n data = match.group(4).lstrip()\n \n # Check PEM object type, check key type if private key." } ]
awx
ad95917db6689a4ff8bd4478f51dde88fa7b67e1
e9235d8b54c145666f538ff13bccb99b009bccef
awx/main/tests/functional/api/test_inventory.py
https://github.com/gconsidine/awx
true
false
true
@@ -59,7 +59,7 @@ def test_async_inventory_duplicate_deletion_prevention(delete, get, inventory, a resp = delete(reverse('api:inventory_detail', kwargs={'pk': inventory.id}), alice) assert resp.status_code == 400 - assert resp.data['error'] == 'Inventory is already being deleted.' + assert resp.data['error'] == 'Inventory is already pending deletion.'
assert resp . data [ 'error' ] == 'Inventory is already being deleted.'
assert resp . data [ 'error' ] == 'Inventory is already pending deletion.'
CHANGE_STRING_LITERAL
[["Update", ["string:'Inventory is already being deleted.'", 3, 34, 3, 71], "'Inventory is already pending deletion.'"]]
gconsidine/awx@ad95917db6689a4ff8bd4478f51dde88fa7b67e1
fix tests
[ { "sha": "1cd93182f9720f6239df0fc9470d4ec4f0f0e7d4", "filename": "awx/main/tests/functional/api/test_inventory.py", "status": "modified", "additions": 3, "deletions": 3, "changes": 6, "blob_url": "https://github.com/gconsidine/awx/blob/ad95917db6689a4ff8bd4478f51dde88fa7b67e1/awx%2Fmain%2Ftests%2Ffunctional%2Fapi%2Ftest_inventory.py", "raw_url": "https://github.com/gconsidine/awx/raw/ad95917db6689a4ff8bd4478f51dde88fa7b67e1/awx%2Fmain%2Ftests%2Ffunctional%2Fapi%2Ftest_inventory.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Ftests%2Ffunctional%2Fapi%2Ftest_inventory.py?ref=ad95917db6689a4ff8bd4478f51dde88fa7b67e1", "patch": "@@ -59,7 +59,7 @@ def test_async_inventory_duplicate_deletion_prevention(delete, get, inventory, a\n \n resp = delete(reverse('api:inventory_detail', kwargs={'pk': inventory.id}), alice)\n assert resp.status_code == 400\n- assert resp.data['error'] == 'Inventory is already being deleted.'\n+ assert resp.data['error'] == 'Inventory is already pending deletion.'\n \n \n @pytest.mark.parametrize('order_by', ('script', '-script', 'script,pk', '-script,pk'))\n@@ -314,12 +314,12 @@ def test_no_post_in_options(self, options, scm_inventory, admin_user):\n @pytest.mark.django_db\n class TestInsightsCredential:\n def test_insights_credential(self, patch, insights_inventory, admin_user, insights_credential):\n- patch(insights_inventory.get_absolute_url(), \n+ patch(insights_inventory.get_absolute_url(),\n {'insights_credential': insights_credential.id}, admin_user,\n expect=200)\n \n def test_non_insights_credential(self, patch, insights_inventory, admin_user, scm_credential):\n- patch(insights_inventory.get_absolute_url(), \n+ patch(insights_inventory.get_absolute_url(),\n {'insights_credential': scm_credential.id}, admin_user,\n expect=400)\n " } ]
awx
668bce8212f5b45360a31f9cbd1928db16a35283
413976f2f7621c7af88a86f865a07b56ecb5e7aa
awx/main/models/unified_jobs.py
https://github.com/gconsidine/awx
true
false
true
@@ -712,7 +712,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique result = super(UnifiedJob, self).save(*args, **kwargs) # If status changed, update the parent instance. - if self.status != status_before: + if self.status != status_before and self.status != 'pending': self._update_parent_instance() # Done.
if self . status != status_before : self . _update_parent_instance ( )
if self . status != status_before and self . status != 'pending' : self . _update_parent_instance ( )
MORE_SPECIFIC_IF
[["Insert", ["if_statement", 3, 9, 4, 43], ["boolean_operator", "N0"], 1], ["Move", "N0", ["comparison_operator", 3, 12, 3, 40], 0], ["Insert", "N0", ["and:and", "T"], 1], ["Insert", "N0", ["comparison_operator", "N1"], 2], ["Insert", "N1", ["attribute", "N2"], 0], ["Insert", "N1", ["!=:!=", "T"], 1], ["Insert", "N1", ["string:'pending'", "T"], 2], ["Insert", "N2", ["identifier:self", "T"], 0], ["Insert", "N2", [".:.", "T"], 1], ["Insert", "N2", ["identifier:status", "T"], 2]]
gconsidine/awx@668bce8212f5b45360a31f9cbd1928db16a35283
fix job launch deadlock * This both fixes the deadlock problem and a logic problem. We shouldn't set the job's job_template current_job to pending jobs.
[ { "sha": "b7b5ca80738507d87cc36c042889d0e952abb3ec", "filename": "awx/main/models/unified_jobs.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/668bce8212f5b45360a31f9cbd1928db16a35283/awx%2Fmain%2Fmodels%2Funified_jobs.py", "raw_url": "https://github.com/gconsidine/awx/raw/668bce8212f5b45360a31f9cbd1928db16a35283/awx%2Fmain%2Fmodels%2Funified_jobs.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Fmodels%2Funified_jobs.py?ref=668bce8212f5b45360a31f9cbd1928db16a35283", "patch": "@@ -712,7 +712,7 @@ def save(self, *args, **kwargs):\n result = super(UnifiedJob, self).save(*args, **kwargs)\n \n # If status changed, update the parent instance.\n- if self.status != status_before:\n+ if self.status != status_before and self.status != 'pending':\n self._update_parent_instance()\n \n # Done." } ]
awx
e04c4e30d8e036f68168408762fc4cd57e6c6723
dae57b71d633c86fceb44233de5af0c3c7a3fae4
awx/main/tests/functional/commands/test_cleanup_facts.py
https://github.com/gconsidine/awx
true
false
true
@@ -108,7 +108,7 @@ def test_system_tracking_feature_disabled(mocker): cmd = Command() with pytest.raises(CommandError) as err: cmd.handle(None) - assert 'The System Tracking feature is not enabled for your Tower instance' in err.value + assert 'The System Tracking feature is not enabled for your instance' in err.value @mock.patch('awx.main.management.commands.cleanup_facts.feature_enabled', new=mock_feature_enabled)
assert 'The System Tracking feature is not enabled for your Tower instance' in err . value
assert 'The System Tracking feature is not enabled for your instance' in err . value
CHANGE_STRING_LITERAL
[["Update", ["string:'The System Tracking feature is not enabled for your Tower instance'", 3, 12, 3, 80], "'The System Tracking feature is not enabled for your instance'"]]
gconsidine/awx@e04c4e30d8e036f68168408762fc4cd57e6c6723
update tests to fix the removal of the word Tower
[ { "sha": "35a531fe86196def9fccfcff737a97a35fb253d3", "filename": "awx/main/tests/functional/commands/test_cleanup_facts.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/e04c4e30d8e036f68168408762fc4cd57e6c6723/awx%2Fmain%2Ftests%2Ffunctional%2Fcommands%2Ftest_cleanup_facts.py", "raw_url": "https://github.com/gconsidine/awx/raw/e04c4e30d8e036f68168408762fc4cd57e6c6723/awx%2Fmain%2Ftests%2Ffunctional%2Fcommands%2Ftest_cleanup_facts.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Ftests%2Ffunctional%2Fcommands%2Ftest_cleanup_facts.py?ref=e04c4e30d8e036f68168408762fc4cd57e6c6723", "patch": "@@ -108,7 +108,7 @@ def test_system_tracking_feature_disabled(mocker):\n cmd = Command()\n with pytest.raises(CommandError) as err:\n cmd.handle(None)\n- assert 'The System Tracking feature is not enabled for your Tower instance' in err.value\n+ assert 'The System Tracking feature is not enabled for your instance' in err.value\n \n \n @mock.patch('awx.main.management.commands.cleanup_facts.feature_enabled', new=mock_feature_enabled)" } ]
awx
0a339ed05c968df18eaffd0d74f383ee1a20ecdd
ed3e03644beb5c61daa53cdcaa51c2f4eec53766
awx/api/views.py
https://github.com/gconsidine/awx
true
false
true
@@ -2218,7 +2218,7 @@ class GroupHostsList(ControlledByScmMixin, SubListCreateAttachDetachAPIView): parent_group = Group.objects.get(id=self.kwargs['pk']) # Inject parent group inventory ID into new host data. request.data['inventory'] = parent_group.inventory_id - existing_hosts = Host.objects.filter(inventory=parent_group.inventory, name=request.data['name']) + existing_hosts = Host.objects.filter(inventory=parent_group.inventory, name=request.data.get('name', '')) if existing_hosts.count() > 0 and ('variables' not in request.data or request.data['variables'] == '' or request.data['variables'] == '{}' or
existing_hosts = Host . objects . filter ( inventory = parent_group . inventory , name = request . data [ 'name' ] )
existing_hosts = Host . objects . filter ( inventory = parent_group . inventory , name = request . data . get ( 'name' , '' ) )
SINGLE_STMT
[["Insert", ["keyword_argument", 3, 80, 3, 105], ["call", "N0"], 2], ["Insert", "N0", ["attribute", "N1"], 0], ["Insert", "N0", ["argument_list", "N2"], 1], ["Move", "N1", ["attribute", 3, 85, 3, 97], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:get", "T"], 2], ["Insert", "N2", ["(:(", "T"], 0], ["Move", "N2", ["string:'name'", 3, 98, 3, 104], 1], ["Insert", "N2", [",:,", "T"], 2], ["Insert", "N2", ["string:''", "T"], 3], ["Insert", "N2", ["):)", "T"], 4], ["Delete", ["[:[", 3, 97, 3, 98]], ["Delete", ["]:]", 3, 104, 3, 105]], ["Delete", ["subscript", 3, 85, 3, 105]]]
gconsidine/awx@0a339ed05c968df18eaffd0d74f383ee1a20ecdd
prevent server error in POST to group hosts
[ { "sha": "35543151745d33019ff8584baec0fbc59a63ec94", "filename": "awx/api/views.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/0a339ed05c968df18eaffd0d74f383ee1a20ecdd/awx%2Fapi%2Fviews.py", "raw_url": "https://github.com/gconsidine/awx/raw/0a339ed05c968df18eaffd0d74f383ee1a20ecdd/awx%2Fapi%2Fviews.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fapi%2Fviews.py?ref=0a339ed05c968df18eaffd0d74f383ee1a20ecdd", "patch": "@@ -2218,7 +2218,7 @@ def create(self, request, *args, **kwargs):\n parent_group = Group.objects.get(id=self.kwargs['pk'])\n # Inject parent group inventory ID into new host data.\n request.data['inventory'] = parent_group.inventory_id\n- existing_hosts = Host.objects.filter(inventory=parent_group.inventory, name=request.data['name'])\n+ existing_hosts = Host.objects.filter(inventory=parent_group.inventory, name=request.data.get('name', ''))\n if existing_hosts.count() > 0 and ('variables' not in request.data or\n request.data['variables'] == '' or\n request.data['variables'] == '{}' or" } ]
awx
7a795b86816e816f52ae25d1cb539f88fe72d898
459c33d2723c3e2207b1575e01f68a84e554a179
awx/main/models/unified_jobs.py
https://github.com/gconsidine/awx
true
false
true
@@ -713,7 +713,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique result = super(UnifiedJob, self).save(*args, **kwargs) # If status changed, update the parent instance. - if self.status != status_before: + if self.status != status_before and self.status != 'pending': self._update_parent_instance() # Done.
if self . status != status_before : self . _update_parent_instance ( )
if self . status != status_before and self . status != 'pending' : self . _update_parent_instance ( )
MORE_SPECIFIC_IF
[["Insert", ["if_statement", 3, 9, 4, 43], ["boolean_operator", "N0"], 1], ["Move", "N0", ["comparison_operator", 3, 12, 3, 40], 0], ["Insert", "N0", ["and:and", "T"], 1], ["Insert", "N0", ["comparison_operator", "N1"], 2], ["Insert", "N1", ["attribute", "N2"], 0], ["Insert", "N1", ["!=:!=", "T"], 1], ["Insert", "N1", ["string:'pending'", "T"], 2], ["Insert", "N2", ["identifier:self", "T"], 0], ["Insert", "N2", [".:.", "T"], 1], ["Insert", "N2", ["identifier:status", "T"], 2]]
gconsidine/awx@7a795b86816e816f52ae25d1cb539f88fe72d898
fix job launch deadlock * This both fixes the deadlock problem and a logic problem. We shouldn't set the job's job_template current_job to pending jobs.
[ { "sha": "e9a4f00c91d7f4b818ec7011ca8603dff29cb18a", "filename": "awx/main/models/unified_jobs.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/7a795b86816e816f52ae25d1cb539f88fe72d898/awx%2Fmain%2Fmodels%2Funified_jobs.py", "raw_url": "https://github.com/gconsidine/awx/raw/7a795b86816e816f52ae25d1cb539f88fe72d898/awx%2Fmain%2Fmodels%2Funified_jobs.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Fmodels%2Funified_jobs.py?ref=7a795b86816e816f52ae25d1cb539f88fe72d898", "patch": "@@ -713,7 +713,7 @@ def save(self, *args, **kwargs):\n result = super(UnifiedJob, self).save(*args, **kwargs)\n \n # If status changed, update the parent instance.\n- if self.status != status_before:\n+ if self.status != status_before and self.status != 'pending':\n self._update_parent_instance()\n \n # Done." } ]
awx
c1998e378df35ccc4f233ffc25fcadb335338c3c
c30545e220424ef1cdd4f5d26b87f6a0067e93ea
awx/main/models/projects.py
https://github.com/gconsidine/awx
true
false
true
@@ -473,7 +473,7 @@ class ProjectUpdate(UnifiedJob, ProjectOptions, JobNotificationMixin): @property def task_impact(self): - return 20 + return 0 if self.job_type == 'run' else 20 @property def result_stdout(self):
return 20
return 0 if self . job_type == 'run' else 20
SINGLE_STMT
[["Insert", ["return_statement", 3, 9, 3, 18], ["conditional_expression", "N0"], 1], ["Insert", "N0", ["integer:0", "T"], 0], ["Insert", "N0", ["if:if", "T"], 1], ["Insert", "N0", ["comparison_operator", "N1"], 2], ["Insert", "N0", ["else:else", "T"], 3], ["Move", "N0", ["integer:20", 3, 16, 3, 18], 4], ["Insert", "N1", ["attribute", "N2"], 0], ["Insert", "N1", ["==:==", "T"], 1], ["Insert", "N1", ["string:'run'", "T"], 2], ["Insert", "N2", ["identifier:self", "T"], 0], ["Insert", "N2", [".:.", "T"], 1], ["Insert", "N2", ["identifier:job_type", "T"], 2]]
gconsidine/awx@c1998e378df35ccc4f233ffc25fcadb335338c3c
Remove run type project update task_impact
[ { "sha": "4536e4a307709e8b6c86f427e745df9aa52ab4b5", "filename": "awx/main/models/projects.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/c1998e378df35ccc4f233ffc25fcadb335338c3c/awx%2Fmain%2Fmodels%2Fprojects.py", "raw_url": "https://github.com/gconsidine/awx/raw/c1998e378df35ccc4f233ffc25fcadb335338c3c/awx%2Fmain%2Fmodels%2Fprojects.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Fmodels%2Fprojects.py?ref=c1998e378df35ccc4f233ffc25fcadb335338c3c", "patch": "@@ -473,7 +473,7 @@ def websocket_emit_data(self):\n \n @property\n def task_impact(self):\n- return 20\n+ return 0 if self.job_type == 'run' else 20\n \n @property\n def result_stdout(self):" } ]
awx
450baf2e8d56b213750c4ed278e6f58bf7be6975
ff14c5bafb119928b4bfc85c3722ae92863ef367
awx/main/management/commands/inventory_import.py
https://github.com/gconsidine/awx
true
false
true
@@ -852,7 +852,7 @@ class Command(NoArgsCommand): if license_info.get('license_key', 'UNLICENSED') == 'UNLICENSED': logger.error(LICENSE_NON_EXISTANT_MESSAGE) raise CommandError('No license found!') - elif license_info('license_type', 'UNLICENSED') == 'open': + elif license_info.get('license_type', 'UNLICENSED') == 'open': return available_instances = license_info.get('available_instances', 0) free_instances = license_info.get('free_instances', 0)
if license_info . get ( 'license_key' , 'UNLICENSED' ) == 'UNLICENSED' : logger . error ( LICENSE_NON_EXISTANT_MESSAGE ) raise CommandError ( 'No license found!' ) elif license_info ( 'license_type' , 'UNLICENSED' ) == 'open' : return
if license_info . get ( 'license_key' , 'UNLICENSED' ) == 'UNLICENSED' : logger . error ( LICENSE_NON_EXISTANT_MESSAGE ) raise CommandError ( 'No license found!' ) elif license_info . get ( 'license_type' , 'UNLICENSED' ) == 'open' : return
ADD_ATTRIBUTE_ACCESS
[["Insert", ["call", 3, 14, 3, 56], ["attribute", "N0"], 0], ["Move", "N0", ["identifier:license_info", 3, 14, 3, 26], 0], ["Insert", "N0", [".:.", "T"], 1], ["Insert", "N0", ["identifier:get", "T"], 2]]
gconsidine/awx@450baf2e8d56b213750c4ed278e6f58bf7be6975
Fix a dictionary reference when getting the license type on import
[ { "sha": "4e6c0a66ee3a8145f223642ab7a2cd6d86dba75a", "filename": "awx/main/management/commands/inventory_import.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/450baf2e8d56b213750c4ed278e6f58bf7be6975/awx%2Fmain%2Fmanagement%2Fcommands%2Finventory_import.py", "raw_url": "https://github.com/gconsidine/awx/raw/450baf2e8d56b213750c4ed278e6f58bf7be6975/awx%2Fmain%2Fmanagement%2Fcommands%2Finventory_import.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Fmanagement%2Fcommands%2Finventory_import.py?ref=450baf2e8d56b213750c4ed278e6f58bf7be6975", "patch": "@@ -852,7 +852,7 @@ def check_license(self):\n if license_info.get('license_key', 'UNLICENSED') == 'UNLICENSED':\n logger.error(LICENSE_NON_EXISTANT_MESSAGE)\n raise CommandError('No license found!')\n- elif license_info('license_type', 'UNLICENSED') == 'open':\n+ elif license_info.get('license_type', 'UNLICENSED') == 'open':\n return\n available_instances = license_info.get('available_instances', 0)\n free_instances = license_info.get('free_instances', 0)" } ]
awx
5cbdadc3e8338f26fd7c061c269f0360016fa0a3
12537c2739485e930b282bc744a7a0a5314a8268
awx/plugins/library/scan_packages.py
https://github.com/gconsidine/awx
true
false
true
@@ -74,7 +74,7 @@ def rpm_package_list(): def deb_package_list(): import apt apt_cache = apt.Cache() - installed_packages = [] + installed_packages = {} apt_installed_packages = [pk for pk in apt_cache.keys() if apt_cache[pk].is_installed] for package in apt_installed_packages: ac_pkg = apt_cache[package].installed
installed_packages = [ ]
installed_packages = { }
SINGLE_STMT
[["Insert", ["assignment", 3, 5, 3, 28], ["dictionary", "N0"], 2], ["Insert", "N0", ["{:{", "T"], 0], ["Insert", "N0", ["}:}", "T"], 1], ["Delete", ["[:[", 3, 26, 3, 27]], ["Delete", ["]:]", 3, 27, 3, 28]], ["Delete", ["list", 3, 26, 3, 28]]]
gconsidine/awx@5cbdadc3e8338f26fd7c061c269f0360016fa0a3
Fix typo in scan_packages plugin
[ { "sha": "3fd2edc1fa2217441363d5b1c5b4dd612fc832fd", "filename": "awx/plugins/library/scan_packages.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/5cbdadc3e8338f26fd7c061c269f0360016fa0a3/awx%2Fplugins%2Flibrary%2Fscan_packages.py", "raw_url": "https://github.com/gconsidine/awx/raw/5cbdadc3e8338f26fd7c061c269f0360016fa0a3/awx%2Fplugins%2Flibrary%2Fscan_packages.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fplugins%2Flibrary%2Fscan_packages.py?ref=5cbdadc3e8338f26fd7c061c269f0360016fa0a3", "patch": "@@ -74,7 +74,7 @@ def rpm_package_list():\n def deb_package_list():\n import apt\n apt_cache = apt.Cache()\n- installed_packages = []\n+ installed_packages = {}\n apt_installed_packages = [pk for pk in apt_cache.keys() if apt_cache[pk].is_installed]\n for package in apt_installed_packages:\n ac_pkg = apt_cache[package].installed" } ]
awx
4cc58a221b888449f4e1ce89de2f3c7d8fe249b5
e5043093ebd94232564c8d6668b1b51c469eb047
awx/conf/tests/unit/test_settings.py
https://github.com/gconsidine/awx
true
false
true
@@ -264,7 +264,7 @@ def test_setting_from_db_with_unicode(settings, mocker, encrypted): # this simulates a bug in python-memcached; see https://github.com/linsomniac/python-memcached/issues/79 value = six.u('Iñtërnâtiônàlizætiøn').encode('utf-8') - setting_from_db = mocker.Mock(key='AWX_SOME_SETTING', value=value) + setting_from_db = mocker.Mock(id=1, key='AWX_SOME_SETTING', value=value) mocks = mocker.Mock(**{ 'order_by.return_value': mocker.Mock(**{ '__iter__': lambda self: iter([setting_from_db]),
setting_from_db = mocker . Mock ( key = 'AWX_SOME_SETTING' , value = value )
setting_from_db = mocker . Mock ( id = 1 , key = 'AWX_SOME_SETTING' , value = value )
SAME_FUNCTION_MORE_ARGS
[["Insert", ["argument_list", 3, 34, 3, 71], ["keyword_argument", "N0"], 1], ["Insert", ["argument_list", 3, 34, 3, 71], [",:,", "T"], 2], ["Insert", "N0", ["identifier:id", "T"], 0], ["Insert", "N0", ["=:=", "T"], 1], ["Insert", "N0", ["integer:1", "T"], 2]]
gconsidine/awx@4cc58a221b888449f4e1ce89de2f3c7d8fe249b5
fix busted conf unit tests
[ { "sha": "d290228056f0a4a80b8983c48d12a755f3427008", "filename": "awx/conf/tests/unit/test_settings.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/4cc58a221b888449f4e1ce89de2f3c7d8fe249b5/awx%2Fconf%2Ftests%2Funit%2Ftest_settings.py", "raw_url": "https://github.com/gconsidine/awx/raw/4cc58a221b888449f4e1ce89de2f3c7d8fe249b5/awx%2Fconf%2Ftests%2Funit%2Ftest_settings.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fconf%2Ftests%2Funit%2Ftest_settings.py?ref=4cc58a221b888449f4e1ce89de2f3c7d8fe249b5", "patch": "@@ -264,7 +264,7 @@ def test_setting_from_db_with_unicode(settings, mocker, encrypted):\n # this simulates a bug in python-memcached; see https://github.com/linsomniac/python-memcached/issues/79\n value = six.u('Iñtërnâtiônàlizætiøn').encode('utf-8')\n \n- setting_from_db = mocker.Mock(key='AWX_SOME_SETTING', value=value)\n+ setting_from_db = mocker.Mock(id=1, key='AWX_SOME_SETTING', value=value)\n mocks = mocker.Mock(**{\n 'order_by.return_value': mocker.Mock(**{\n '__iter__': lambda self: iter([setting_from_db])," } ]
awx
50b4002280175b55388e81db116a85aadbf14b7a
2677a17c8a9b96d93d8391ee0428fbb0ca18a87c
awx/sso/conf.py
https://github.com/gconsidine/awx
true
false
false
@@ -286,7 +286,7 @@ register( label=_('LDAP Group Type'), help_text=_('The group type may need to be changed based on the type of the ' 'LDAP server. Values are listed at: ' - 'http://pythonhosted.org/django-auth-ldap/groups.html#types-of-groups'), + 'https://django-auth-ldap.readthedocs.io/en/stable/groups.html#types-of-groups'), category=_('LDAP'), category_slug='ldap', feature_required='ldap',
help_text = _ ( 'The group type may need to be changed based on the type of the ' 'LDAP server. Values are listed at: ' 'http://pythonhosted.org/django-auth-ldap/groups.html#types-of-groups' ) ,
help_text = _ ( 'The group type may need to be changed based on the type of the ' 'LDAP server. Values are listed at: ' 'https://django-auth-ldap.readthedocs.io/en/stable/groups.html#types-of-groups' ) ,
CHANGE_STRING_LITERAL
[["Update", ["string:'http://pythonhosted.org/django-auth-ldap/groups.html#types-of-groups'", 3, 17, 3, 87], "'https://django-auth-ldap.readthedocs.io/en/stable/groups.html#types-of-groups'"]]
gconsidine/awx@50b4002280175b55388e81db116a85aadbf14b7a
Fix broken link on LDAP group type help text Signed-off-by: Julen Landa Alustiza <[email protected]>
[ { "sha": "3b9ac1b06320a5b96f08e2d841e00dbdda6aceab", "filename": "awx/sso/conf.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/50b4002280175b55388e81db116a85aadbf14b7a/awx%2Fsso%2Fconf.py", "raw_url": "https://github.com/gconsidine/awx/raw/50b4002280175b55388e81db116a85aadbf14b7a/awx%2Fsso%2Fconf.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fsso%2Fconf.py?ref=50b4002280175b55388e81db116a85aadbf14b7a", "patch": "@@ -286,7 +286,7 @@ def __call__(self):\n label=_('LDAP Group Type'),\n help_text=_('The group type may need to be changed based on the type of the '\n 'LDAP server. Values are listed at: '\n- 'http://pythonhosted.org/django-auth-ldap/groups.html#types-of-groups'),\n+ 'https://django-auth-ldap.readthedocs.io/en/stable/groups.html#types-of-groups'),\n category=_('LDAP'),\n category_slug='ldap',\n feature_required='ldap'," } ]
awx
2818bb5833cefb4c6f33e1b809f748d3012dc1c9
02e3f45422611a44c12482a7d60295fc429f14b0
awx/main/expect/isolated_manager.py
https://github.com/gconsidine/awx
true
false
true
@@ -430,7 +430,7 @@ class IsolatedManager(object): except (KeyError, IndexError): task_result = {} if 'capacity' in task_result: - cls.update_capacity(instance, awx_application_version) + cls.update_capacity(instance, task_result, awx_application_version) elif instance.capacity == 0: logger.debug('Isolated instance {} previously marked as lost, could not re-join.'.format( instance.hostname))
cls . update_capacity ( instance , awx_application_version )
cls . update_capacity ( instance , task_result , awx_application_version )
SAME_FUNCTION_MORE_ARGS
[["Insert", ["argument_list", 3, 36, 3, 71], ["identifier:task_result", "T"], 3], ["Insert", ["argument_list", 3, 36, 3, 71], [",:,", "T"], 4]]
gconsidine/awx@2818bb5833cefb4c6f33e1b809f748d3012dc1c9
fix missing parameter to update_capacity method
[ { "sha": "265531443ff066f62ee44f9292c7bff4aa148cf9", "filename": "awx/main/expect/isolated_manager.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/2818bb5833cefb4c6f33e1b809f748d3012dc1c9/awx%2Fmain%2Fexpect%2Fisolated_manager.py", "raw_url": "https://github.com/gconsidine/awx/raw/2818bb5833cefb4c6f33e1b809f748d3012dc1c9/awx%2Fmain%2Fexpect%2Fisolated_manager.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Fexpect%2Fisolated_manager.py?ref=2818bb5833cefb4c6f33e1b809f748d3012dc1c9", "patch": "@@ -430,7 +430,7 @@ def health_check(cls, instance_qs, awx_application_version):\n except (KeyError, IndexError):\n task_result = {}\n if 'capacity' in task_result:\n- cls.update_capacity(instance, awx_application_version)\n+ cls.update_capacity(instance, task_result, awx_application_version)\n elif instance.capacity == 0:\n logger.debug('Isolated instance {} previously marked as lost, could not re-join.'.format(\n instance.hostname))" } ]
awx
f2f42c2c8a840d19c6e1c92d59af57c88fc78065
70f8ec78de61d68b771ac89276053a57e5fa4d6f
awx/sso/backends.py
https://github.com/gconsidine/awx
true
false
true
@@ -307,7 +307,7 @@ def _update_m2m_from_groups(user, ldap_user, rel, opts, remove=True): should_add = True if should_add: rel.add(user) - elif remove: + elif remove and user in rel.all(): rel.remove(user)
if should_add : rel . add ( user ) elif remove : rel . remove ( user )
if should_add : rel . add ( user ) elif remove and user in rel . all ( ) : rel . remove ( user )
MORE_SPECIFIC_IF
[["Insert", ["elif_clause", 3, 5, 4, 25], ["boolean_operator", "N0"], 1], ["Move", "N0", ["identifier:remove", 3, 10, 3, 16], 0], ["Insert", "N0", ["and:and", "T"], 1], ["Insert", "N0", ["comparison_operator", "N1"], 2], ["Insert", "N1", ["identifier:user", "T"], 0], ["Insert", "N1", ["in:in", "T"], 1], ["Insert", "N1", ["call", "N2"], 2], ["Insert", "N2", ["attribute", "N3"], 0], ["Insert", "N2", ["argument_list", "N4"], 1], ["Insert", "N3", ["identifier:rel", "T"], 0], ["Insert", "N3", [".:.", "T"], 1], ["Insert", "N3", ["identifier:all", "T"], 2], ["Insert", "N4", ["(:(", "T"], 0], ["Insert", "N4", ["):)", "T"], 1]]
gconsidine/awx@f2f42c2c8a840d19c6e1c92d59af57c88fc78065
don't append to the activity stream on LDAP group disassociate for organizations w/ a large number of ldap orgs/teams, this results in a _huge_ number of extraneous activity stream entries see: https://github.com/ansible/ansible-tower/issues/7655
[ { "sha": "5ed0385018ddcbb2741becead411dbcd0c86793b", "filename": "awx/sso/backends.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/f2f42c2c8a840d19c6e1c92d59af57c88fc78065/awx%2Fsso%2Fbackends.py", "raw_url": "https://github.com/gconsidine/awx/raw/f2f42c2c8a840d19c6e1c92d59af57c88fc78065/awx%2Fsso%2Fbackends.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fsso%2Fbackends.py?ref=f2f42c2c8a840d19c6e1c92d59af57c88fc78065", "patch": "@@ -307,7 +307,7 @@ def _update_m2m_from_groups(user, ldap_user, rel, opts, remove=True):\n should_add = True\n if should_add:\n rel.add(user)\n- elif remove:\n+ elif remove and user in rel.all():\n rel.remove(user)\n \n " } ]
awx
7a21a4578129ddc3797cf387d73a5f3551036269
91ec0a4482e90219bf1c8d80b98ffea110bc0f33
awx/sso/validators.py
https://github.com/gconsidine/awx
true
false
true
@@ -22,7 +22,7 @@ def validate_ldap_dn(value, with_user=False): else: dn_value = value try: - ldap.dn.str2dn(dn_value) + ldap.dn.str2dn(dn_value.encode('utf-8')) except ldap.DECODING_ERROR: raise ValidationError(_('Invalid DN: %s') % value)
ldap . dn . str2dn ( dn_value )
ldap . dn . str2dn ( dn_value . encode ( 'utf-8' ) )
ADD_METHOD_CALL
[["Insert", ["argument_list", 3, 23, 3, 33], ["call", "N0"], 1], ["Insert", ["argument_list", 3, 23, 3, 33], ["):)", "T"], 2], ["Insert", "N0", ["attribute", "N1"], 0], ["Insert", "N0", ["argument_list", "N2"], 1], ["Move", "N1", ["identifier:dn_value", 3, 24, 3, 32], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:encode", "T"], 2], ["Insert", "N2", ["(:(", "T"], 0], ["Insert", "N2", ["string:'utf-8'", "T"], 1], ["Move", "N2", ["):)", 3, 32, 3, 33], 2]]
gconsidine/awx@7a21a4578129ddc3797cf387d73a5f3551036269
properly encode LDAP DN values on validation see: https://github.com/ansible/ansible-tower/issues/7554
[ { "sha": "dd1086a4267d7d0941f53b1e974fbac576687c9c", "filename": "awx/sso/validators.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/7a21a4578129ddc3797cf387d73a5f3551036269/awx%2Fsso%2Fvalidators.py", "raw_url": "https://github.com/gconsidine/awx/raw/7a21a4578129ddc3797cf387d73a5f3551036269/awx%2Fsso%2Fvalidators.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fsso%2Fvalidators.py?ref=7a21a4578129ddc3797cf387d73a5f3551036269", "patch": "@@ -22,7 +22,7 @@ def validate_ldap_dn(value, with_user=False):\n else:\n dn_value = value\n try:\n- ldap.dn.str2dn(dn_value)\n+ ldap.dn.str2dn(dn_value.encode('utf-8'))\n except ldap.DECODING_ERROR:\n raise ValidationError(_('Invalid DN: %s') % value)\n " } ]
awx
798d27c2cbbb3a37ecc2438c785ee6f2891cd3e1
5b4dc9e7ee656a96095b7b46b246dd97308fc74c
awx/main/tests/unit/test_task_manager.py
https://github.com/gconsidine/awx
true
false
false
@@ -52,7 +52,7 @@ class TestCleanupInconsistentCeleryTasks(): logger_mock.error.assert_called_once_with("Task job 2 (failed) DB error in marking failed. Job possibly deleted.") @mock.patch.object(InstanceGroup.objects, 'prefetch_related', return_value=[]) - @mock.patch('awx.main.scheduler.task_manager.inspect') + @mock.patch('awx.main.scheduler.task_manager.Inspect') def test_multiple_active_instances_sanity_check(self, inspect_mock, *args): class MockInspector: pass
@ mock . patch . object ( InstanceGroup . objects , 'prefetch_related' , return_value = [ ] ) @ mock . patch ( 'awx.main.scheduler.task_manager.inspect' ) def test_multiple_active_instances_sanity_check ( self , inspect_mock , * args ) : class MockInspector : pass
@ mock . patch . object ( InstanceGroup . objects , 'prefetch_related' , return_value = [ ] ) @ mock . patch ( 'awx.main.scheduler.task_manager.Inspect' ) def test_multiple_active_instances_sanity_check ( self , inspect_mock , * args ) : class MockInspector : pass
CHANGE_STRING_LITERAL
[["Update", ["string:'awx.main.scheduler.task_manager.inspect'", 3, 17, 3, 58], "'awx.main.scheduler.task_manager.Inspect'"]]
gconsidine/awx@798d27c2cbbb3a37ecc2438c785ee6f2891cd3e1
Fix task_manager test
[ { "sha": "da3bddc5e42f9422b63adfe6cc084b41aa2fc648", "filename": "awx/main/tests/unit/test_task_manager.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/798d27c2cbbb3a37ecc2438c785ee6f2891cd3e1/awx%2Fmain%2Ftests%2Funit%2Ftest_task_manager.py", "raw_url": "https://github.com/gconsidine/awx/raw/798d27c2cbbb3a37ecc2438c785ee6f2891cd3e1/awx%2Fmain%2Ftests%2Funit%2Ftest_task_manager.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Ftests%2Funit%2Ftest_task_manager.py?ref=798d27c2cbbb3a37ecc2438c785ee6f2891cd3e1", "patch": "@@ -52,7 +52,7 @@ def test_save_failed(self, logger_mock, get_running_tasks, *args):\n logger_mock.error.assert_called_once_with(\"Task job 2 (failed) DB error in marking failed. Job possibly deleted.\")\n \n @mock.patch.object(InstanceGroup.objects, 'prefetch_related', return_value=[])\n- @mock.patch('awx.main.scheduler.task_manager.inspect')\n+ @mock.patch('awx.main.scheduler.task_manager.Inspect')\n def test_multiple_active_instances_sanity_check(self, inspect_mock, *args):\n class MockInspector:\n pass" } ]
awx
a880f47925d57b4ff319893efd66447a20937626
ce6d96feda020b0064d9e5eb52227929f4c7e7c6
awx/main/access.py
https://github.com/gconsidine/awx
true
false
true
@@ -883,7 +883,7 @@ class InventoryUpdateAccess(BaseAccess): prefetch_related = ('unified_job_template', 'instance_group',) def filtered_queryset(self): - return qs.filter(inventory_source__inventory__in=Inventory.accessible_pk_qs(self.user, 'read_role')) + return self.model.objects.filter(inventory_source__inventory__in=Inventory.accessible_pk_qs(self.user, 'read_role')) def can_cancel(self, obj): if not obj.can_cancel:
return qs . filter ( inventory_source__inventory__in = Inventory . accessible_pk_qs ( self . user , 'read_role' ) )
return self . model . objects . filter ( inventory_source__inventory__in = Inventory . accessible_pk_qs ( self . user , 'read_role' ) )
SINGLE_STMT
[["Insert", ["attribute", 3, 16, 3, 25], ["attribute", "N0"], 0], ["Insert", ["attribute", 3, 16, 3, 25], [".:.", "T"], 1], ["Insert", "N0", ["attribute", "N1"], 0], ["Insert", "N0", [".:.", "T"], 1], ["Insert", "N0", ["identifier:objects", "T"], 2], ["Update", ["identifier:qs", 3, 16, 3, 18], "self"], ["Move", "N1", ["identifier:qs", 3, 16, 3, 18], 0], ["Move", "N1", [".:.", 3, 18, 3, 19], 1], ["Insert", "N1", ["identifier:model", "T"], 2]]
gconsidine/awx@a880f47925d57b4ff319893efd66447a20937626
fix bug with inventory update queryset
[ { "sha": "5fdf1ae78973f8e9e245a92fbae2304eb206a914", "filename": "awx/main/access.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/a880f47925d57b4ff319893efd66447a20937626/awx%2Fmain%2Faccess.py", "raw_url": "https://github.com/gconsidine/awx/raw/a880f47925d57b4ff319893efd66447a20937626/awx%2Fmain%2Faccess.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Faccess.py?ref=a880f47925d57b4ff319893efd66447a20937626", "patch": "@@ -883,7 +883,7 @@ class InventoryUpdateAccess(BaseAccess):\n prefetch_related = ('unified_job_template', 'instance_group',)\n \n def filtered_queryset(self):\n- return qs.filter(inventory_source__inventory__in=Inventory.accessible_pk_qs(self.user, 'read_role'))\n+ return self.model.objects.filter(inventory_source__inventory__in=Inventory.accessible_pk_qs(self.user, 'read_role'))\n \n def can_cancel(self, obj):\n if not obj.can_cancel:" } ]
awx
febfcf709dc2c3248a9ae86ddcb749745448ab6a
cf1d5a29f6218672d32a19c73c3b4d81cddb3924
tools/sosreport/tower.py
https://github.com/gconsidine/awx
true
false
false
@@ -8,7 +8,7 @@ SOSREPORT_TOWER_COMMANDS = [ "ansible --version", # ansible core version "awx-manage --version", # tower version "supervisorctl status", # tower process status - "/var/lib/awx/venv/tower/bin/pip freeze", # pip package list + "/var/lib/awx/venv/awx/bin/pip freeze", # pip package list "/var/lib/awx/venv/ansible/bin/pip freeze", # pip package list "tree -d /var/lib/awx", # show me the dirs "ls -ll /var/lib/awx", # check permissions
"/var/lib/awx/venv/tower/bin/pip freeze" ,
"/var/lib/awx/venv/awx/bin/pip freeze" ,
CHANGE_STRING_LITERAL
[["Update", ["string:\"/var/lib/awx/venv/tower/bin/pip freeze\"", 3, 5, 3, 45], "\"/var/lib/awx/venv/awx/bin/pip freeze\""]]
gconsidine/awx@febfcf709dc2c3248a9ae86ddcb749745448ab6a
fix incorrect virtualenv path for sosreport plugin
[ { "sha": "732bcca9f78e6534b496504eb71fc4e2a9f08aaa", "filename": "tools/sosreport/tower.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/febfcf709dc2c3248a9ae86ddcb749745448ab6a/tools%2Fsosreport%2Ftower.py", "raw_url": "https://github.com/gconsidine/awx/raw/febfcf709dc2c3248a9ae86ddcb749745448ab6a/tools%2Fsosreport%2Ftower.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/tools%2Fsosreport%2Ftower.py?ref=febfcf709dc2c3248a9ae86ddcb749745448ab6a", "patch": "@@ -8,7 +8,7 @@\n \"ansible --version\", # ansible core version\n \"awx-manage --version\", # tower version\n \"supervisorctl status\", # tower process status\n- \"/var/lib/awx/venv/tower/bin/pip freeze\", # pip package list\n+ \"/var/lib/awx/venv/awx/bin/pip freeze\", # pip package list\n \"/var/lib/awx/venv/ansible/bin/pip freeze\", # pip package list\n \"tree -d /var/lib/awx\", # show me the dirs\n \"ls -ll /var/lib/awx\", # check permissions" } ]
awx
2621f48189519fe94edefac60ef9bceaf6892ad6
b56db8bc405f5db1476c0b107e24de1847ff24f8
awx/lib/tower_display_callback/module.py
https://github.com/gconsidine/awx
true
false
true
@@ -182,7 +182,7 @@ class BaseCallbackModule(CallbackBase): def v2_playbook_on_include(self, included_file): event_data = dict( - included_file=included_file, + included_file=included_file._filename if included_file is not None else None, ) with self.capture_event_data('playbook_on_include', **event_data): super(BaseCallbackModule, self).v2_playbook_on_include(included_file)
event_data = dict ( included_file = included_file , )
event_data = dict ( included_file = included_file . _filename if included_file is not None else None , )
SINGLE_STMT
[["Insert", ["keyword_argument", 3, 13, 3, 40], ["conditional_expression", "N0"], 2], ["Insert", "N0", ["attribute", "N1"], 0], ["Insert", "N0", ["if:if", "T"], 1], ["Insert", "N0", ["comparison_operator", "N2"], 2], ["Insert", "N0", ["else:else", "T"], 3], ["Insert", "N0", ["none:None", "T"], 4], ["Move", "N1", ["identifier:included_file", 3, 27, 3, 40], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:_filename", "T"], 2], ["Insert", "N2", ["identifier:included_file", "T"], 0], ["Insert", "N2", ["is:is", "T"], 1], ["Insert", "N2", ["not:not", "T"], 2], ["Insert", "N2", ["none:None", "T"], 3]]
gconsidine/awx@2621f48189519fe94edefac60ef9bceaf6892ad6
We specifically need to convert the ansible included file type for include events
[ { "sha": "c553b08853deaf474d005988b1d5e47bd858a1b8", "filename": "awx/lib/tower_display_callback/module.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/2621f48189519fe94edefac60ef9bceaf6892ad6/awx%2Flib%2Ftower_display_callback%2Fmodule.py", "raw_url": "https://github.com/gconsidine/awx/raw/2621f48189519fe94edefac60ef9bceaf6892ad6/awx%2Flib%2Ftower_display_callback%2Fmodule.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Flib%2Ftower_display_callback%2Fmodule.py?ref=2621f48189519fe94edefac60ef9bceaf6892ad6", "patch": "@@ -182,7 +182,7 @@ def v2_playbook_on_vars_prompt(self, varname, private=True, prompt=None,\n \n def v2_playbook_on_include(self, included_file):\n event_data = dict(\n- included_file=included_file,\n+ included_file=included_file._filename if included_file is not None else None,\n )\n with self.capture_event_data('playbook_on_include', **event_data):\n super(BaseCallbackModule, self).v2_playbook_on_include(included_file)" } ]
awx
5943193b78dff5f61268f2f2f1bbc69adf898a49
2a8e57e6557552fea50cd1d94449dacc2220b627
awx/conf/settings.py
https://github.com/gconsidine/awx
true
false
true
@@ -203,7 +203,7 @@ class SettingsWrapper(UserSettingsHolder): except AttributeError: file_default = None if file_default != init_default and file_default is not None: - logger.warning('Setting %s has been marked read-only!', key) + logger.debug('Setting %s has been marked read-only!', key) self.registry._registry[key]['read_only'] = True self.registry._registry[key]['defined_in_file'] = True self.__dict__['_awx_conf_init_readonly'] = True
logger . warning ( 'Setting %s has been marked read-only!' , key )
logger . debug ( 'Setting %s has been marked read-only!' , key )
WRONG_FUNCTION_NAME
[["Update", ["identifier:warning", 3, 32, 3, 39], "debug"]]
gconsidine/awx@5943193b78dff5f61268f2f2f1bbc69adf898a49
Move the warnings on settings-in-conf-files to 'debug'. As we write the pendo settings on install, this becomes fairly noisy in practice if we use 'warning'.
[ { "sha": "8b1c0786a1f13c8a0e95452da826f73a550cc420", "filename": "awx/conf/settings.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/5943193b78dff5f61268f2f2f1bbc69adf898a49/awx%2Fconf%2Fsettings.py", "raw_url": "https://github.com/gconsidine/awx/raw/5943193b78dff5f61268f2f2f1bbc69adf898a49/awx%2Fconf%2Fsettings.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fconf%2Fsettings.py?ref=5943193b78dff5f61268f2f2f1bbc69adf898a49", "patch": "@@ -203,7 +203,7 @@ def _preload_cache(self):\n except AttributeError:\n file_default = None\n if file_default != init_default and file_default is not None:\n- logger.warning('Setting %s has been marked read-only!', key)\n+ logger.debug('Setting %s has been marked read-only!', key)\n self.registry._registry[key]['read_only'] = True\n self.registry._registry[key]['defined_in_file'] = True\n self.__dict__['_awx_conf_init_readonly'] = True" } ]
awx
8d643dc4596eb81c5d6b4b78d05468d8ae71884f
1cbbb6a87c35082f134d0e8a242a243215872d54
awx/conf/fields.py
https://github.com/gconsidine/awx
true
false
true
@@ -52,7 +52,7 @@ class URLField(CharField): if url_parts.hostname and '.' not in url_parts.hostname: netloc = '{}.local'.format(url_parts.hostname) if url_parts.port: - netloc = '{}:{}'.format(netloc, port) + netloc = '{}:{}'.format(netloc, url_parts.port) if url_parts.username: if url_parts.password: netloc = '{}:{}@{}' % (url_parts.username, url_parts.password, netloc)
netloc = '{}:{}' . format ( netloc , port )
netloc = '{}:{}' . format ( netloc , url_parts . port )
SINGLE_STMT
[["Insert", ["argument_list", 3, 48, 3, 62], ["attribute", "N0"], 3], ["Insert", "N0", ["identifier:url_parts", "T"], 0], ["Insert", "N0", [".:.", "T"], 1], ["Move", "N0", ["identifier:port", 3, 57, 3, 61], 2]]
gconsidine/awx@8d643dc4596eb81c5d6b4b78d05468d8ae71884f
Fix an issue not properly parsing the url port of the tower host When attempting to determine the proper url for URLField
[ { "sha": "13d80ae937b24577156b9610da5dceba11b46e8b", "filename": "awx/conf/fields.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/8d643dc4596eb81c5d6b4b78d05468d8ae71884f/awx%2Fconf%2Ffields.py", "raw_url": "https://github.com/gconsidine/awx/raw/8d643dc4596eb81c5d6b4b78d05468d8ae71884f/awx%2Fconf%2Ffields.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fconf%2Ffields.py?ref=8d643dc4596eb81c5d6b4b78d05468d8ae71884f", "patch": "@@ -52,7 +52,7 @@ def run_validators(self, value):\n if url_parts.hostname and '.' not in url_parts.hostname:\n netloc = '{}.local'.format(url_parts.hostname)\n if url_parts.port:\n- netloc = '{}:{}'.format(netloc, port)\n+ netloc = '{}:{}'.format(netloc, url_parts.port)\n if url_parts.username:\n if url_parts.password:\n netloc = '{}:{}@{}' % (url_parts.username, url_parts.password, netloc)" } ]
awx
cae8950723dbcde382fb5be300fc9c0d71c99cf1
4ce69f2f9c79dc45f5dc9bc04d256d196e800650
awx/api/views.py
https://github.com/gconsidine/awx
true
false
true
@@ -518,7 +518,7 @@ class AuthView(APIView): def get(self, request): data = OrderedDict() err_backend, err_message = request.session.get('social_auth_error', (None, None)) - auth_backends = load_backends(settings.AUTHENTICATION_BACKENDS).items() + auth_backends = load_backends(settings.AUTHENTICATION_BACKENDS, force_load=True).items() # Return auth backends in consistent order: Google, GitHub, SAML. auth_backends.sort(key=lambda x: 'g' if x[0] == 'google-oauth2' else x[0]) for name, backend in auth_backends:
auth_backends = load_backends ( settings . AUTHENTICATION_BACKENDS ) . items ( )
auth_backends = load_backends ( settings . AUTHENTICATION_BACKENDS , force_load = True ) . items ( )
SAME_FUNCTION_MORE_ARGS
[["Insert", ["argument_list", 3, 38, 3, 72], [",:,", "T"], 2], ["Insert", ["argument_list", 3, 38, 3, 72], ["keyword_argument", "N0"], 3], ["Insert", "N0", ["identifier:force_load", "T"], 0], ["Insert", "N0", ["=:=", "T"], 1], ["Insert", "N0", ["true:True", "T"], 2]]
gconsidine/awx@cae8950723dbcde382fb5be300fc9c0d71c99cf1
don't cache social-auth-core backends social-auth-core uses a global variable to cache backend settings: https://github.com/python-social-auth/social-core/blob/78da4eb201dd22fd2d8a4e38a1d17a73beabad24/social_core/backends/utils.py#L9 when loading backends, forcibly ignore this behavior to avoid a thread-safety issue that causes #4788 #4045
[ { "sha": "97a90428cefdaae91e8195a8601fda799defbd52", "filename": "awx/api/views.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/cae8950723dbcde382fb5be300fc9c0d71c99cf1/awx%2Fapi%2Fviews.py", "raw_url": "https://github.com/gconsidine/awx/raw/cae8950723dbcde382fb5be300fc9c0d71c99cf1/awx%2Fapi%2Fviews.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fapi%2Fviews.py?ref=cae8950723dbcde382fb5be300fc9c0d71c99cf1", "patch": "@@ -518,7 +518,7 @@ class AuthView(APIView):\n def get(self, request):\n data = OrderedDict()\n err_backend, err_message = request.session.get('social_auth_error', (None, None))\n- auth_backends = load_backends(settings.AUTHENTICATION_BACKENDS).items()\n+ auth_backends = load_backends(settings.AUTHENTICATION_BACKENDS, force_load=True).items()\n # Return auth backends in consistent order: Google, GitHub, SAML.\n auth_backends.sort(key=lambda x: 'g' if x[0] == 'google-oauth2' else x[0])\n for name, backend in auth_backends:" } ]
awx
e36cd17c161c21c744bd4654f605e2b47cd702a5
ce0096f308c8745851c05e229e9cd70ef95955b5
awx/api/views.py
https://github.com/gconsidine/awx
true
false
true
@@ -2487,7 +2487,7 @@ class JobTemplateSurveySpec(GenericAPIView): return Response(dict(error=_("'required' missing from survey question %s.") % str(idx)), status=status.HTTP_400_BAD_REQUEST) if survey_item["type"] == "password": - if "default" in survey_item and survey_item["default"].startswith('$encrypted$'): + if survey_item.get("default") and survey_item["default"].startswith('$encrypted$'): old_spec = obj.survey_spec for old_item in old_spec['spec']: if old_item['variable'] == survey_item['variable']:
if "default" in survey_item and survey_item [ "default" ] . startswith ( '$encrypted$' ) : old_spec = obj . survey_spec for old_item in old_spec [ 'spec' ] : if old_item [ 'variable' ] == survey_item [ 'variable' ] :
if survey_item . get ( "default" ) and survey_item [ "default" ] . startswith ( '$encrypted$' ) : old_spec = obj . survey_spec for old_item in old_spec [ 'spec' ] : if old_item [ 'variable' ] == survey_item [ 'variable' ] :
CHANGE_BINARY_OPERAND
[["Insert", ["boolean_operator", 3, 20, 3, 97], ["call", "N0"], 0], ["Insert", "N0", ["attribute", "N1"], 0], ["Insert", "N0", ["argument_list", "N2"], 1], ["Insert", "N1", ["identifier:survey_item", "T"], 0], ["Insert", "N1", [".:.", "T"], 1], ["Update", ["identifier:survey_item", 3, 33, 3, 44], "get"], ["Move", "N1", ["identifier:survey_item", 3, 33, 3, 44], 2], ["Insert", "N2", ["(:(", "T"], 0], ["Insert", "N2", ["string:\"default\"", "T"], 1], ["Insert", "N2", ["):)", "T"], 2], ["Delete", ["string:\"default\"", 3, 20, 3, 29]], ["Delete", ["in:in", 3, 30, 3, 32]], ["Delete", ["comparison_operator", 3, 20, 3, 44]]]
gconsidine/awx@e36cd17c161c21c744bd4654f605e2b47cd702a5
fix a minor bug in survey_spec password field handling see: #5477
[ { "sha": "33cff82049954e0ed0316a459dd548677f8c3032", "filename": "awx/api/views.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/e36cd17c161c21c744bd4654f605e2b47cd702a5/awx%2Fapi%2Fviews.py", "raw_url": "https://github.com/gconsidine/awx/raw/e36cd17c161c21c744bd4654f605e2b47cd702a5/awx%2Fapi%2Fviews.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fapi%2Fviews.py?ref=e36cd17c161c21c744bd4654f605e2b47cd702a5", "patch": "@@ -2487,7 +2487,7 @@ def post(self, request, *args, **kwargs):\n return Response(dict(error=_(\"'required' missing from survey question %s.\") % str(idx)), status=status.HTTP_400_BAD_REQUEST)\n \n if survey_item[\"type\"] == \"password\":\n- if \"default\" in survey_item and survey_item[\"default\"].startswith('$encrypted$'):\n+ if survey_item.get(\"default\") and survey_item[\"default\"].startswith('$encrypted$'):\n old_spec = obj.survey_spec\n for old_item in old_spec['spec']:\n if old_item['variable'] == survey_item['variable']:" } ]
awx
08825a1f4998a38473ba8b6775bdd9f8dcdd7d99
05e1348ef9d817d2f5cdec9928e41232c8cb873b
awx/main/scheduler/__init__.py
https://github.com/gconsidine/awx
true
false
true
@@ -373,7 +373,7 @@ class TaskManager(): # list of task id's from celery and now. # Note: This is an actual fix, not a reduction in the time # window that this can happen. - if task_obj.status is not 'running': + if task_obj.status != 'running': continue task_obj.status = 'failed' task_obj.job_explanation += ' '.join((
if task_obj . status is not 'running' : continue
if task_obj . status != 'running' : continue
SINGLE_STMT
[["Insert", ["comparison_operator", 3, 20, 3, 52], ["!=:!=", "T"], 1], ["Delete", ["is:is", 3, 36, 3, 38]], ["Delete", ["not:not", 3, 39, 3, 42]]]
gconsidine/awx@08825a1f4998a38473ba8b6775bdd9f8dcdd7d99
fix check running status
[ { "sha": "a48ca3ad2373f711490ffcf527b10e54c8a6a1ea", "filename": "awx/main/scheduler/__init__.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/08825a1f4998a38473ba8b6775bdd9f8dcdd7d99/awx%2Fmain%2Fscheduler%2F__init__.py", "raw_url": "https://github.com/gconsidine/awx/raw/08825a1f4998a38473ba8b6775bdd9f8dcdd7d99/awx%2Fmain%2Fscheduler%2F__init__.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Fscheduler%2F__init__.py?ref=08825a1f4998a38473ba8b6775bdd9f8dcdd7d99", "patch": "@@ -373,7 +373,7 @@ def process_celery_tasks(self, active_tasks, all_running_sorted_tasks):\n # list of task id's from celery and now.\n # Note: This is an actual fix, not a reduction in the time \n # window that this can happen.\n- if task_obj.status is not 'running':\n+ if task_obj.status != 'running':\n continue\n task_obj.status = 'failed'\n task_obj.job_explanation += ' '.join((" } ]
awx
1a946d51c13131103561c97c076f628589cdae69
d8151b05b803e20ac13f86f899ccc3a1c4908549
awx/main/tasks.py
https://github.com/gconsidine/awx
true
false
true
@@ -1218,7 +1218,7 @@ class RunProjectUpdate(BaseTask): 'scm_url': scm_url, 'scm_branch': scm_branch, 'scm_clean': project_update.scm_clean, - 'scm_delete_on_update': project_update.scm_delete_on_update, + 'scm_delete_on_update': project_update.scm_delete_on_update if project_update.job_type == 'sync' else False, 'scm_full_checkout': True if project_update.job_type == 'run' else False, 'scm_revision_output': self.revision_path })
'scm_delete_on_update' : project_update . scm_delete_on_update ,
'scm_delete_on_update' : project_update . scm_delete_on_update if project_update . job_type == 'sync' else False ,
SINGLE_STMT
[["Insert", ["expression_statement", 3, 13, 3, 73], ["conditional_expression", "N0"], 0], ["Move", "N0", ["attribute", 3, 13, 3, 72], 0], ["Insert", "N0", ["if:if", "T"], 1], ["Insert", "N0", ["comparison_operator", "N1"], 2], ["Insert", "N0", ["else:else", "T"], 3], ["Insert", "N0", ["false:False", "T"], 4], ["Insert", "N1", ["attribute", "N2"], 0], ["Insert", "N1", ["==:==", "T"], 1], ["Insert", "N1", ["string:'sync'", "T"], 2], ["Insert", "N2", ["identifier:project_update", "T"], 0], ["Insert", "N2", [".:.", "T"], 1], ["Insert", "N2", ["identifier:job_type", "T"], 2]]
gconsidine/awx@1a946d51c13131103561c97c076f628589cdae69
Fix an issue with sync vs run project updates We need to not remove the source tree on project update run which happens in the context of the job run to prevent the directory from being deleted out from under the job runners. Since those will inherently not block each other as the project update would
[ { "sha": "ce4c9bc5bd538edff81246166caab9047651c0ea", "filename": "awx/main/tasks.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/1a946d51c13131103561c97c076f628589cdae69/awx%2Fmain%2Ftasks.py", "raw_url": "https://github.com/gconsidine/awx/raw/1a946d51c13131103561c97c076f628589cdae69/awx%2Fmain%2Ftasks.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Ftasks.py?ref=1a946d51c13131103561c97c076f628589cdae69", "patch": "@@ -1218,7 +1218,7 @@ def build_args(self, project_update, **kwargs):\n 'scm_url': scm_url,\n 'scm_branch': scm_branch,\n 'scm_clean': project_update.scm_clean,\n- 'scm_delete_on_update': project_update.scm_delete_on_update,\n+ 'scm_delete_on_update': project_update.scm_delete_on_update if project_update.job_type == 'sync' else False,\n 'scm_full_checkout': True if project_update.job_type == 'run' else False,\n 'scm_revision_output': self.revision_path\n })" } ]
awx
98daaece73f27d064631a39b3768ee2925299ef1
242e1b543c43b0aecfba7a8cb8f305c870935004
awx/main/tasks.py
https://github.com/gconsidine/awx
true
false
true
@@ -1218,7 +1218,7 @@ class RunProjectUpdate(BaseTask): 'scm_url': scm_url, 'scm_branch': scm_branch, 'scm_clean': project_update.scm_clean, - 'scm_delete_on_update': project_update.scm_delete_on_update if project_update.job_type == 'sync' else False, + 'scm_delete_on_update': project_update.scm_delete_on_update if project_update.job_type == 'check' else False, 'scm_full_checkout': True if project_update.job_type == 'run' else False, 'scm_revision_output': self.revision_path })
'scm_delete_on_update' : project_update . scm_delete_on_update if project_update . job_type == 'sync' else False ,
'scm_delete_on_update' : project_update . scm_delete_on_update if project_update . job_type == 'check' else False ,
CHANGE_STRING_LITERAL
[["Update", ["string:'sync'", 3, 103, 3, 109], "'check'"]]
gconsidine/awx@98daaece73f27d064631a39b3768ee2925299ef1
Switch job_type to check from sync when detecting delete_on_update
[ { "sha": "584e6ce0bbd20783d9dd8d5c7ca6c0788f91ab52", "filename": "awx/main/tasks.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/98daaece73f27d064631a39b3768ee2925299ef1/awx%2Fmain%2Ftasks.py", "raw_url": "https://github.com/gconsidine/awx/raw/98daaece73f27d064631a39b3768ee2925299ef1/awx%2Fmain%2Ftasks.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Ftasks.py?ref=98daaece73f27d064631a39b3768ee2925299ef1", "patch": "@@ -1218,7 +1218,7 @@ def build_args(self, project_update, **kwargs):\n 'scm_url': scm_url,\n 'scm_branch': scm_branch,\n 'scm_clean': project_update.scm_clean,\n- 'scm_delete_on_update': project_update.scm_delete_on_update if project_update.job_type == 'sync' else False,\n+ 'scm_delete_on_update': project_update.scm_delete_on_update if project_update.job_type == 'check' else False,\n 'scm_full_checkout': True if project_update.job_type == 'run' else False,\n 'scm_revision_output': self.revision_path\n })" } ]
awx
54bcdccdab8a0a85ec54bc205008508967b3c882
9db49d981911fd3a0973c104fdedf62d042f8a8e
awx/main/conf.py
https://github.com/gconsidine/awx
true
false
false
@@ -322,5 +322,5 @@ register( help_text=_('Useful to uniquely identify Tower instances.'), category=_('Logging'), category_slug='logging', - default=None, + default='', )
default = None ,
default = '' ,
SINGLE_TOKEN
[["Insert", ["expression_list", 3, 13, 3, 18], ["string:''", "T"], 0], ["Delete", ["none:None", 3, 13, 3, 17]]]
gconsidine/awx@54bcdccdab8a0a85ec54bc205008508967b3c882
give a default other than None 1. If the setting isn't in the database, then the specified default is returned. 2. If the setting IS in the database, then the 'value' field, from the database, is returned; Unless, the value is null in the database, then the defined default is returned. * Take case 1. If the default=None then an AttributeError is raised * Thus, by setting default='', in case 1. We don't get an AttributeError because '' will be returned. * Also note that we don't allow_null=True because then the database entry could be None. Accessing the setting would raise an AttributeError in this case. Further, since we set default='' it doesn't matter if null is in the database. TL;DR set default=<something_other_than_None>
[ { "sha": "098b84f639988d8f87a2b14f883d90a16b89e7aa", "filename": "awx/main/conf.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/54bcdccdab8a0a85ec54bc205008508967b3c882/awx%2Fmain%2Fconf.py", "raw_url": "https://github.com/gconsidine/awx/raw/54bcdccdab8a0a85ec54bc205008508967b3c882/awx%2Fmain%2Fconf.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Fconf.py?ref=54bcdccdab8a0a85ec54bc205008508967b3c882", "patch": "@@ -322,5 +322,5 @@ def _load_default_license_from_file():\n help_text=_('Useful to uniquely identify Tower instances.'),\n category=_('Logging'),\n category_slug='logging',\n- default=None,\n+ default='',\n )" } ]
awx
50ccd1b003c05081bb80a269ffe98b512c911f1a
ed5a01c0cf5c7c1aaf5bf5206c78a9e0ef7e46af
awx/main/models/projects.py
https://github.com/gconsidine/awx
true
false
true
@@ -142,7 +142,7 @@ class ProjectOptions(models.Model): cred = self.credential if cred: if cred.kind != 'scm': - raise ValidationError(_("Credential kind must be either 'scm'.")) + raise ValidationError(_("Credential kind must be 'scm'.")) try: if self.scm_type == 'insights': self.scm_url = settings.INSIGHTS_URL_BASE
raise ValidationError ( _ ( "Credential kind must be either 'scm'." ) )
raise ValidationError ( _ ( "Credential kind must be 'scm'." ) )
CHANGE_STRING_LITERAL
[["Update", ["string:\"Credential kind must be either 'scm'.\"", 3, 41, 3, 80], "\"Credential kind must be 'scm'.\""]]
gconsidine/awx@50ccd1b003c05081bb80a269ffe98b512c911f1a
Fix validation error grammar
[ { "sha": "bf60e5b77cd30b51dd0f9df9d75f5d57e3b3735b", "filename": "awx/main/models/projects.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/50ccd1b003c05081bb80a269ffe98b512c911f1a/awx%2Fmain%2Fmodels%2Fprojects.py", "raw_url": "https://github.com/gconsidine/awx/raw/50ccd1b003c05081bb80a269ffe98b512c911f1a/awx%2Fmain%2Fmodels%2Fprojects.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Fmodels%2Fprojects.py?ref=50ccd1b003c05081bb80a269ffe98b512c911f1a", "patch": "@@ -142,7 +142,7 @@ def clean_credential(self):\n cred = self.credential\n if cred:\n if cred.kind != 'scm':\n- raise ValidationError(_(\"Credential kind must be either 'scm'.\"))\n+ raise ValidationError(_(\"Credential kind must be 'scm'.\"))\n try:\n if self.scm_type == 'insights':\n self.scm_url = settings.INSIGHTS_URL_BASE" } ]
awx
47a894a3396f6ce66e4e741a8035e9273dff244f
4fef391914e9651e0aae7826b8eb5a4ffcb7ae8b
awx/main/tests/functional/core/test_licenses.py
https://github.com/gconsidine/awx
true
false
true
@@ -100,7 +100,7 @@ def test_cloudforms_license(mocker): task_enhancer = TaskEnhancer() vdata = task_enhancer.validate_enhancements() assert vdata['compliant'] is True - assert vdata['subscription_name'] == "Cloudforms License" + assert vdata['subscription_name'] == "Red Hat CloudForms License" assert vdata['available_instances'] == 9999999 assert vdata['license_type'] == 'enterprise' assert vdata['features']['ha'] is True
assert vdata [ 'subscription_name' ] == "Cloudforms License"
assert vdata [ 'subscription_name' ] == "Red Hat CloudForms License"
CHANGE_STRING_LITERAL
[["Update", ["string:\"Cloudforms License\"", 3, 46, 3, 66], "\"Red Hat CloudForms License\""]]
gconsidine/awx@47a894a3396f6ce66e4e741a8035e9273dff244f
Fix up cloudforms license unit test
[ { "sha": "7432dbbdcdbd19687d4774cf6fdd01208d31c718", "filename": "awx/main/tests/functional/core/test_licenses.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/47a894a3396f6ce66e4e741a8035e9273dff244f/awx%2Fmain%2Ftests%2Ffunctional%2Fcore%2Ftest_licenses.py", "raw_url": "https://github.com/gconsidine/awx/raw/47a894a3396f6ce66e4e741a8035e9273dff244f/awx%2Fmain%2Ftests%2Ffunctional%2Fcore%2Ftest_licenses.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Ftests%2Ffunctional%2Fcore%2Ftest_licenses.py?ref=47a894a3396f6ce66e4e741a8035e9273dff244f", "patch": "@@ -100,7 +100,7 @@ def test_cloudforms_license(mocker):\n task_enhancer = TaskEnhancer()\n vdata = task_enhancer.validate_enhancements()\n assert vdata['compliant'] is True\n- assert vdata['subscription_name'] == \"Cloudforms License\"\n+ assert vdata['subscription_name'] == \"Red Hat CloudForms License\"\n assert vdata['available_instances'] == 9999999\n assert vdata['license_type'] == 'enterprise'\n assert vdata['features']['ha'] is True" } ]
awx
34b697ce17a988c469d3850e4f05e4be13897e51
e65ef35acf8be8b33d86de83ea43fe1fe6ab3559
awx/api/views.py
https://github.com/gconsidine/awx
true
false
false
@@ -1120,7 +1120,7 @@ class ProjectSchedulesList(SubListCreateAPIView): class ProjectScmInventorySources(SubListCreateAPIView): view_name = _("Project SCM Inventory Sources") - model = Inventory + model = InventorySource serializer_class = InventorySourceSerializer parent_model = Project relationship = 'scm_inventory_sources'
model = Inventory
model = InventorySource
CHANGE_IDENTIFIER_USED
[["Update", ["identifier:Inventory", 3, 13, 3, 22], "InventorySource"]]
gconsidine/awx@34b697ce17a988c469d3850e4f05e4be13897e51
Fix 500 error in ProjectScmInventorySources
[ { "sha": "e8b11a337388ab8eb28ebbdb50f0acfa6987d369", "filename": "awx/api/views.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/34b697ce17a988c469d3850e4f05e4be13897e51/awx%2Fapi%2Fviews.py", "raw_url": "https://github.com/gconsidine/awx/raw/34b697ce17a988c469d3850e4f05e4be13897e51/awx%2Fapi%2Fviews.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fapi%2Fviews.py?ref=34b697ce17a988c469d3850e4f05e4be13897e51", "patch": "@@ -1120,7 +1120,7 @@ class ProjectSchedulesList(SubListCreateAPIView):\n class ProjectScmInventorySources(SubListCreateAPIView):\n \n view_name = _(\"Project SCM Inventory Sources\")\n- model = Inventory\n+ model = InventorySource\n serializer_class = InventorySourceSerializer\n parent_model = Project\n relationship = 'scm_inventory_sources'" } ]
awx
95b09795a8d6998b7cfe157110a97bea50161e01
3ed9ebed89163e84b9a3cfed50435748c923303d
awx/main/utils/formatters.py
https://github.com/gconsidine/awx
true
false
true
@@ -25,7 +25,7 @@ class LogstashFormatter(LogstashFormatterVersion1): self.host_id = settings_module.CLUSTER_HOST_ID if hasattr(settings_module, 'LOG_AGGREGATOR_TOWER_UUID'): self.tower_uuid = settings_module.LOG_AGGREGATOR_TOWER_UUID - self.message_type = settings_module.LOG_AGGREGATOR_TYPE + self.message_type = getattr(settings_module, 'LOG_AGGREGATOR_TYPE', 'other') return ret def reformat_data_for_log(self, raw_data, kind=None):
self . message_type = settings_module . LOG_AGGREGATOR_TYPE
self . message_type = getattr ( settings_module , 'LOG_AGGREGATOR_TYPE' , 'other' )
SINGLE_STMT
[["Insert", ["assignment", 3, 13, 3, 68], ["call", "N0"], 2], ["Insert", "N0", ["identifier:getattr", "T"], 0], ["Insert", "N0", ["argument_list", "N1"], 1], ["Insert", "N1", ["(:(", "T"], 0], ["Move", "N1", ["identifier:settings_module", 3, 33, 3, 48], 1], ["Insert", "N1", [",:,", "T"], 2], ["Insert", "N1", ["string:'LOG_AGGREGATOR_TYPE'", "T"], 3], ["Insert", "N1", [",:,", "T"], 4], ["Insert", "N1", ["string:'other'", "T"], 5], ["Insert", "N1", ["):)", "T"], 6], ["Delete", [".:.", 3, 48, 3, 49]], ["Delete", ["identifier:LOG_AGGREGATOR_TYPE", 3, 49, 3, 68]], ["Delete", ["attribute", 3, 33, 3, 68]]]
gconsidine/awx@95b09795a8d6998b7cfe157110a97bea50161e01
Handle 500 error with toggling log aggregator enable.
[ { "sha": "a046f02b377bec8f7d331626518f841adf767708", "filename": "awx/main/utils/formatters.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/95b09795a8d6998b7cfe157110a97bea50161e01/awx%2Fmain%2Futils%2Fformatters.py", "raw_url": "https://github.com/gconsidine/awx/raw/95b09795a8d6998b7cfe157110a97bea50161e01/awx%2Fmain%2Futils%2Fformatters.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Futils%2Fformatters.py?ref=95b09795a8d6998b7cfe157110a97bea50161e01", "patch": "@@ -25,7 +25,7 @@ def __init__(self, **kwargs):\n self.host_id = settings_module.CLUSTER_HOST_ID\n if hasattr(settings_module, 'LOG_AGGREGATOR_TOWER_UUID'):\n self.tower_uuid = settings_module.LOG_AGGREGATOR_TOWER_UUID\n- self.message_type = settings_module.LOG_AGGREGATOR_TYPE\n+ self.message_type = getattr(settings_module, 'LOG_AGGREGATOR_TYPE', 'other')\n return ret\n \n def reformat_data_for_log(self, raw_data, kind=None):" } ]
aetherbot
36e94de95f8d72b806450a726c6187a81005b3dc
ed50bc746c7c904013c6260496579bbea896b9f9
core/main.py
https://github.com/ihatevim/aetherbot
true
false
true
@@ -157,7 +157,7 @@ def main(conn, out): prefix = r'^(?:[' + commandprefix + ']|' command_re = prefix + inp.conn.nick - command_re += r'[:]+\s+)(\w+)(?:$|\s+)(.*)' + command_re += r'[,;:]+\s+)(\w+)(?:$|\s+)(.*)' m = re.match(command_re, inp.lastparam)
command_re += r'[:]+\s+)(\w+)(?:$|\s+)(.*)'
command_re += r'[,;:]+\s+)(\w+)(?:$|\s+)(.*)'
CHANGE_STRING_LITERAL
[["Update", ["string:r'[:]+\\s+)(\\w+)(?:$|\\s+)(.*)'", 3, 23, 3, 52], "r'[,;:]+\\s+)(\\w+)(?:$|\\s+)(.*)'"]]
ihatevim/aetherbot@36e94de95f8d72b806450a726c6187a81005b3dc
fixed triggers for bot (via ame)
[ { "sha": "041d67ee90a2cd9e9cb6e896fd65135a4e498a29", "filename": "core/main.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/ihatevim/aetherbot/blob/36e94de95f8d72b806450a726c6187a81005b3dc/core%2Fmain.py", "raw_url": "https://github.com/ihatevim/aetherbot/raw/36e94de95f8d72b806450a726c6187a81005b3dc/core%2Fmain.py", "contents_url": "https://api.github.com/repos/ihatevim/aetherbot/contents/core%2Fmain.py?ref=36e94de95f8d72b806450a726c6187a81005b3dc", "patch": "@@ -157,7 +157,7 @@ def main(conn, out):\n prefix = r'^(?:[' + commandprefix + ']|'\n \n command_re = prefix + inp.conn.nick\n- command_re += r'[:]+\\s+)(\\w+)(?:$|\\s+)(.*)'\n+ command_re += r'[,;:]+\\s+)(\\w+)(?:$|\\s+)(.*)'\n \n m = re.match(command_re, inp.lastparam)\n " } ]
aetherbot
d453dfefb79363dd3ca2bbdad904fcbca86305a1
7d545fd10065c78aa9b6f02a8e2f078f7a547078
core/config.py
https://github.com/ihatevim/aetherbot
true
false
false
@@ -43,7 +43,7 @@ if not os.path.exists('config'): }, "wrapper": { - "backend": "screen" + "backend": "daemon" }, "censored_strings": [
": { "backend": "screen" } ,
": { "backend": "daemon" } ,
CHANGE_STRING_LITERAL
[["Update", ["string:\":\n {\n \"backend\": \"screen\"\n }", 1, 19, 4, 12], "\":\n {\n \"backend\": \"daemon\"\n }"]]
ihatevim/aetherbot@d453dfefb79363dd3ca2bbdad904fcbca86305a1
whoops, forgot to change default
[ { "sha": "96f2302e0d3db258bb29c40ab3af53fbf9f85c23", "filename": "core/config.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/ihatevim/aetherbot/blob/d453dfefb79363dd3ca2bbdad904fcbca86305a1/core%2Fconfig.py", "raw_url": "https://github.com/ihatevim/aetherbot/raw/d453dfefb79363dd3ca2bbdad904fcbca86305a1/core%2Fconfig.py", "contents_url": "https://api.github.com/repos/ihatevim/aetherbot/contents/core%2Fconfig.py?ref=d453dfefb79363dd3ca2bbdad904fcbca86305a1", "patch": "@@ -43,7 +43,7 @@ def save(conf):\n },\n \"wrapper\":\n {\n- \"backend\": \"screen\"\n+ \"backend\": \"daemon\"\n },\n \"censored_strings\":\n [" } ]
aetherbot
0f2206f68f044d90bb820f9b1f1ec610fea5df31
fa99d1eef588f6ed2f83254f4f9759d2fafa90a9
plugins/urlparse.py
https://github.com/ihatevim/aetherbot
true
false
false
@@ -18,7 +18,7 @@ def parse(match): except: return "fail" [email protected](r'([a-zA-Z]://|www\.)?[^ ]+(\.[a-z]+)+') [email protected](r'(^[^\.])([a-zA-Z]://|www\.)?[^ ]+(\.[a-z]+)\/(.*)') def urlparser(match, say=None): url = urlnorm.normalize(match.group().encode('utf-8')) if url[:7] != "http://":
@ hook . regex ( r'([a-zA-Z]://|www\.)?[^ ]+(\.[a-z]+)+' ) def urlparser ( match , say = None ) : url = urlnorm . normalize ( match . group ( ) . encode ( 'utf-8' ) ) if url [ : 7 ] != "http://" :
@ hook . regex ( r'(^[^\.])([a-zA-Z]://|www\.)?[^ ]+(\.[a-z]+)\/(.*)' ) def urlparser ( match , say = None ) : url = urlnorm . normalize ( match . group ( ) . encode ( 'utf-8' ) ) if url [ : 7 ] != "http://" :
CHANGE_STRING_LITERAL
[["Update", ["string:r'([a-zA-Z]://|www\\.)?[^ ]+(\\.[a-z]+)+'", 3, 13, 3, 52], "r'(^[^\\.])([a-zA-Z]://|www\\.)?[^ ]+(\\.[a-z]+)\\/(.*)'"]]
ihatevim/aetherbot@0f2206f68f044d90bb820f9b1f1ec610fea5df31
fixed regex in urlparse.py to not parse commands, also made it respece paths
[ { "sha": "e1d5328b995ea70b65d5b01402f2fe940f0fe4dc", "filename": "plugins/urlparse.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/ihatevim/aetherbot/blob/0f2206f68f044d90bb820f9b1f1ec610fea5df31/plugins%2Furlparse.py", "raw_url": "https://github.com/ihatevim/aetherbot/raw/0f2206f68f044d90bb820f9b1f1ec610fea5df31/plugins%2Furlparse.py", "contents_url": "https://api.github.com/repos/ihatevim/aetherbot/contents/plugins%2Furlparse.py?ref=0f2206f68f044d90bb820f9b1f1ec610fea5df31", "patch": "@@ -18,7 +18,7 @@ def parse(match):\n except:\n return \"fail\"\n \[email protected](r'([a-zA-Z]://|www\\.)?[^ ]+(\\.[a-z]+)+')\[email protected](r'(^[^\\.])([a-zA-Z]://|www\\.)?[^ ]+(\\.[a-z]+)\\/(.*)')\n def urlparser(match, say=None):\n url = urlnorm.normalize(match.group().encode('utf-8'))\n if url[:7] != \"http://\":" } ]
aetherbot
dec3690373f3ef4fecbc156bf74567739ed0a831
819620ddb187b2c656979221a757b2e0e9b9079d
plugins/urlparse.py
https://github.com/ihatevim/aetherbot
true
false
false
@@ -18,7 +18,7 @@ def parse(match): except: return "fail" [email protected](r'(^[^\.])([a-zA-Z]://|www\.)?[^ ]+(\.[a-z]+)(\/)?(.*)') [email protected](r'([a-zA-Z]://|www\.)?[^ ]+(\.[a-z]+)(\/)?(.*)') def urlparser(match, say=None): url = urlnorm.normalize(match.group().encode('utf-8')) if url[:7] != "http://":
@ hook . regex ( r'(^[^\.])([a-zA-Z]://|www\.)?[^ ]+(\.[a-z]+)(\/)?(.*)' ) def urlparser ( match , say = None ) : url = urlnorm . normalize ( match . group ( ) . encode ( 'utf-8' ) ) if url [ : 7 ] != "http://" :
@ hook . regex ( r'([a-zA-Z]://|www\.)?[^ ]+(\.[a-z]+)(\/)?(.*)' ) def urlparser ( match , say = None ) : url = urlnorm . normalize ( match . group ( ) . encode ( 'utf-8' ) ) if url [ : 7 ] != "http://" :
CHANGE_STRING_LITERAL
[["Update", ["string:r'(^[^\\.])([a-zA-Z]://|www\\.)?[^ ]+(\\.[a-z]+)(\\/)?(.*)'", 3, 13, 3, 68], "r'([a-zA-Z]://|www\\.)?[^ ]+(\\.[a-z]+)(\\/)?(.*)'"]]
ihatevim/aetherbot@dec3690373f3ef4fecbc156bf74567739ed0a831
null
null
aetherbot
75c17d5723ba0267e17ceaf3860a03c9c04bb744
063ded0f472d3c9f8ef23ac4fbeddcc078db4f95
plugins/admin.py
https://github.com/ihatevim/aetherbot
true
false
false
@@ -14,7 +14,7 @@ def admins(inp, bot=None): return ", ".join(admins) [email protected](autohelp=False, adminOnly=True) [email protected](autohelp=False, adminonly=True) def stop(inp, input=None, db=None, notice=None): ".stop [reason] -- Kills the bot with [reason] as its quit message." if inp:
@ hook . command ( autohelp = False , adminOnly = True ) def stop ( inp , input = None , db = None , notice = None ) : ".stop [reason] -- Kills the bot with [reason] as its quit message." if inp :
@ hook . command ( autohelp = False , adminonly = True ) def stop ( inp , input = None , db = None , notice = None ) : ".stop [reason] -- Kills the bot with [reason] as its quit message." if inp :
CHANGE_KEYWORD_ARGUMENT_USED
[["Update", ["identifier:adminOnly", 3, 31, 3, 40], "adminonly"]]
ihatevim/aetherbot@75c17d5723ba0267e17ceaf3860a03c9c04bb744
another quick bugfix
[ { "sha": "d894d51cf4f7dc18b9b38131b539814470699d5e", "filename": "plugins/admin.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/ihatevim/aetherbot/blob/75c17d5723ba0267e17ceaf3860a03c9c04bb744/plugins%2Fadmin.py", "raw_url": "https://github.com/ihatevim/aetherbot/raw/75c17d5723ba0267e17ceaf3860a03c9c04bb744/plugins%2Fadmin.py", "contents_url": "https://api.github.com/repos/ihatevim/aetherbot/contents/plugins%2Fadmin.py?ref=75c17d5723ba0267e17ceaf3860a03c9c04bb744", "patch": "@@ -14,7 +14,7 @@ def admins(inp, bot=None):\n return \", \".join(admins)\n \n \[email protected](autohelp=False, adminOnly=True)\[email protected](autohelp=False, adminonly=True)\n def stop(inp, input=None, db=None, notice=None):\n \".stop [reason] -- Kills the bot with [reason] as its quit message.\"\n if inp:" } ]
aetherbot
3acd6536a6387c081842739a385689b0deefe32c
297806308c9033d02415e2d1aa1a6ca29d0d278e
plugins/fortune.py
https://github.com/ihatevim/aetherbot
true
false
false
@@ -8,7 +8,7 @@ with open("plugins/data/fortunes.txt") as f: for line in f.readlines(): if line.startswith("//"): continue - fortunes.append(line) + fortunes.append(line.strip()) @hook.command(autohelp=False) def fortune(inp, nick=None, say=None, input=None):
fortunes . append ( line )
fortunes . append ( line . strip ( ) )
ADD_METHOD_CALL
[["Insert", ["argument_list", 3, 24, 3, 30], ["call", "N0"], 1], ["Insert", ["argument_list", 3, 24, 3, 30], ["):)", "T"], 2], ["Insert", "N0", ["attribute", "N1"], 0], ["Insert", "N0", ["argument_list", "N2"], 1], ["Move", "N1", ["identifier:line", 3, 25, 3, 29], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:strip", "T"], 2], ["Insert", "N2", ["(:(", "T"], 0], ["Move", "N2", ["):)", 3, 29, 3, 30], 1]]
ihatevim/aetherbot@3acd6536a6387c081842739a385689b0deefe32c
null
null
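The fix above strips each fortune before storing it, so the trailing newline from `readlines()` never reaches chat output. A tiny sketch with an in-memory file standing in for plugins/data/fortunes.txt:

    import io

    f = io.StringIO("// comment line\nA fortune about tests.\nAnother fortune.\n")

    fortunes = []
    for line in f.readlines():
        if line.startswith("//"):
            continue
        fortunes.append(line.strip())   # drop the trailing '\n' and stray spaces

    print(fortunes)   # ['A fortune about tests.', 'Another fortune.']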
aetherbot
d65f1557405c26c0d8fcb25e1c29eb80b962e10c
8d6a2d71d419607e4ec13fb20b38a261f775fe6c
plugins/fmylife.py
https://github.com/ihatevim/aetherbot
true
false
true
@@ -18,7 +18,7 @@ def refresh_cache(): for e in soup.findAll('div', {'class': 'post article'}): id = int(e['id']) - text = e.find('p', text=True) + text = ''.join(e.find('p').findAll(text=True)) fml_cache[id] = text # do an initial refresh of the cache
text = e . find ( 'p' , text = True )
text = '' . join ( e . find ( 'p' ) . findAll ( text = True ) )
SINGLE_STMT
[["Insert", ["call", 3, 16, 3, 38], ["attribute", "N0"], 0], ["Insert", ["call", 3, 16, 3, 38], ["argument_list", "N1"], 1], ["Insert", "N0", ["string:''", "T"], 0], ["Insert", "N0", [".:.", "T"], 1], ["Insert", "N0", ["identifier:join", "T"], 2], ["Insert", "N1", ["(:(", "T"], 0], ["Insert", "N1", ["call", "N2"], 1], ["Insert", "N1", ["):)", "T"], 2], ["Insert", "N2", ["attribute", "N3"], 0], ["Move", "N2", ["argument_list", 3, 22, 3, 38], 1], ["Insert", "N3", ["call", "N4"], 0], ["Insert", "N3", [".:.", "T"], 1], ["Insert", "N3", ["identifier:findAll", "T"], 2], ["Move", "N4", ["attribute", 3, 16, 3, 22], 0], ["Insert", "N4", ["argument_list", "N5"], 1], ["Insert", "N5", ["(:(", "T"], 0], ["Move", "N5", ["string:'p'", 3, 23, 3, 26], 1], ["Insert", "N5", ["):)", "T"], 2], ["Delete", [",:,", 3, 26, 3, 27]]]
ihatevim/aetherbot@d65f1557405c26c0d8fcb25e1c29eb80b962e10c
Workaround for stupid bug
[ { "sha": "68d1d895f49df00ad55663d45b2af1f3789ed880", "filename": "plugins/fmylife.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/ihatevim/aetherbot/blob/d65f1557405c26c0d8fcb25e1c29eb80b962e10c/plugins%2Ffmylife.py", "raw_url": "https://github.com/ihatevim/aetherbot/raw/d65f1557405c26c0d8fcb25e1c29eb80b962e10c/plugins%2Ffmylife.py", "contents_url": "https://api.github.com/repos/ihatevim/aetherbot/contents/plugins%2Ffmylife.py?ref=d65f1557405c26c0d8fcb25e1c29eb80b962e10c", "patch": "@@ -18,7 +18,7 @@ def refresh_cache():\n \n for e in soup.findAll('div', {'class': 'post article'}):\n id = int(e['id'])\n- text = e.find('p', text=True)\n+ text = ''.join(e.find('p').findAll(text=True))\n fml_cache[id] = text\n \n # do an initial refresh of the cache" } ]
aetherbot
ebdf34256e7249f19284e74b7ed3b780511900b2
2f492a4659b53118588a1984f6311edf72d9f154
plugins/admin.py
https://github.com/ihatevim/aetherbot
true
false
true
@@ -38,7 +38,7 @@ def restart(inp, nick=None, conn=None): else: conn.cmd("QUIT", ["Restarted by %s." % nick]) time.sleep(5) - subprocess.call(["./cloudbot", "restart"]) + os.execl("./cloudbot", "restart") @hook.command(autohelp=False, adminonly=True)
subprocess . call ( [ "./cloudbot" , "restart" ] )
os . execl ( "./cloudbot" , "restart" )
SINGLE_STMT
[["Update", ["identifier:subprocess", 3, 5, 3, 15], "os"], ["Update", ["identifier:call", 3, 16, 3, 20], "execl"], ["Move", ["argument_list", 3, 20, 3, 47], ["string:\"./cloudbot\"", 3, 22, 3, 34], 1], ["Move", ["argument_list", 3, 20, 3, 47], [",:,", 3, 34, 3, 35], 2], ["Move", ["argument_list", 3, 20, 3, 47], ["string:\"restart\"", 3, 36, 3, 45], 3], ["Delete", ["[:[", 3, 21, 3, 22]], ["Delete", ["]:]", 3, 45, 3, 46]], ["Delete", ["list", 3, 21, 3, 46]]]
ihatevim/aetherbot@ebdf34256e7249f19284e74b7ed3b780511900b2
null
null
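The restart fix above swaps a child-process call for an exec. A short sketch of the difference, with the launcher path taken from the diff and purely illustrative otherwise: subprocess.call spawns a child and returns to the (about to exit) bot, whereas os.execl replaces the current process image, so the restarted program simply takes over the running process.

import os
import subprocess

def restart_via_subprocess():
    subprocess.call(["./cloudbot", "restart"])  # child runs; this function returns

def restart_via_exec():
    # os.execl(path, arg0, ...) never returns on success; note that the fix
    # passes "restart" as argv[0] of the new process
    os.execl("./cloudbot", "restart")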
aetherbot
d717292c135c9919ea4abf0b6ee227a5a293e93a
7b1cc2ff0e4cbfeb1ad8cb29dffc937d74266250
plugins/misc.py
https://github.com/ihatevim/aetherbot
true
false
true
@@ -65,7 +65,7 @@ def onjoin(paraml, conn=None, bot=None): # system info command @hook.command(autohelp=False) def system(inp): - ".system -- retrieves information about the host system" + ".system -- Retrieves information about the host system." python_version = platform.python_version() os = platform.platform(aliased=True) cpu = platform.machine()
".system -- retrieves information about the host system"
".system -- Retrieves information about the host system."
CHANGE_STRING_LITERAL
[["Update", ["string:\".system -- retrieves information about the host system\"", 3, 5, 3, 61], "\".system -- Retrieves information about the host system.\""]]
ihatevim/aetherbot@d717292c135c9919ea4abf0b6ee227a5a293e93a
fix help message
[ { "sha": "395328855dcd7538265ad94c14e0d6fa5fc9a748", "filename": "plugins/misc.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/ihatevim/aetherbot/blob/d717292c135c9919ea4abf0b6ee227a5a293e93a/plugins%2Fmisc.py", "raw_url": "https://github.com/ihatevim/aetherbot/raw/d717292c135c9919ea4abf0b6ee227a5a293e93a/plugins%2Fmisc.py", "contents_url": "https://api.github.com/repos/ihatevim/aetherbot/contents/plugins%2Fmisc.py?ref=d717292c135c9919ea4abf0b6ee227a5a293e93a", "patch": "@@ -65,7 +65,7 @@ def onjoin(paraml, conn=None, bot=None):\n # system info command\n @hook.command(autohelp=False)\n def system(inp):\n- \".system -- retrieves information about the host system\"\n+ \".system -- Retrieves information about the host system.\"\n python_version = platform.python_version()\n os = platform.platform(aliased=True)\n cpu = platform.machine()" } ]
aetherbot
0e6840b0a8a52e185618daa4f2cf1619c928076a
10e975ed69dbfd4e80e26d52be0108565dac434c
plugins/admin.py
https://github.com/ihatevim/aetherbot
true
false
true
@@ -155,7 +155,7 @@ def me(inp, conn=None, chan=None, notice=None): for x in split[0:]: message = message + x + " " message = message[:-1] - out = "PRIVMSG %s :\x01ACTION %s\x01" % (input.chan, message) + out = "PRIVMSG %s :\x01ACTION %s\x01" % (chan, message) conn.send(out)
out = "PRIVMSG %s :\x01ACTION %s\x01" % ( input . chan , message )
out = "PRIVMSG %s :\x01ACTION %s\x01" % ( chan , message )
SINGLE_STMT
[["Move", ["tuple", 3, 49, 3, 70], ["identifier:chan", 3, 56, 3, 60], 1], ["Delete", ["identifier:input", 3, 50, 3, 55]], ["Delete", [".:.", 3, 55, 3, 56]], ["Delete", ["attribute", 3, 50, 3, 60]]]
ihatevim/aetherbot@0e6840b0a8a52e185618daa4f2cf1619c928076a
.me bugfix
[ { "sha": "dcd6c4e6df3db5e8840359e49ae40d019da14c49", "filename": "plugins/admin.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/ihatevim/aetherbot/blob/0e6840b0a8a52e185618daa4f2cf1619c928076a/plugins%2Fadmin.py", "raw_url": "https://github.com/ihatevim/aetherbot/raw/0e6840b0a8a52e185618daa4f2cf1619c928076a/plugins%2Fadmin.py", "contents_url": "https://api.github.com/repos/ihatevim/aetherbot/contents/plugins%2Fadmin.py?ref=0e6840b0a8a52e185618daa4f2cf1619c928076a", "patch": "@@ -155,7 +155,7 @@ def me(inp, conn=None, chan=None, notice=None):\n for x in split[0:]:\n message = message + x + \" \"\n message = message[:-1]\n- out = \"PRIVMSG %s :\\x01ACTION %s\\x01\" % (input.chan, message)\n+ out = \"PRIVMSG %s :\\x01ACTION %s\\x01\" % (chan, message)\n conn.send(out)\n \n " } ]
wcicp-call-service
d788a7033bdd6991937b514f46f9a268376180c3
b5c812569ff7f9bf731807c4357573956cc680bd
crisiscleanup/calls/models.py
https://github.com/CrisisCleanup/wcicp-call-service
true
false
true
@@ -137,7 +137,7 @@ class Call(models.Model): db_table = 'call' def __str__(self): - return 'Call to/from {} at {}'.format(caller.phone_number, call_start) + return 'Call to/from {} at {}'.format(self.caller.phone_number, self.call_start) class CallWorksite(models.Model):
return 'Call to/from {} at {}' . format ( caller . phone_number , call_start )
return 'Call to/from {} at {}' . format ( self . caller . phone_number , self . call_start )
SINGLE_STMT
[["Insert", ["argument_list", 3, 46, 3, 79], ["attribute", "N0"], 1], ["Insert", ["argument_list", 3, 46, 3, 79], ["attribute", "N1"], 4], ["Move", "N0", ["attribute", 3, 47, 3, 66], 0], ["Move", ["identifier:caller", 3, 47, 3, 53], ["attribute", 3, 47, 3, 66], 1], ["Insert", "N0", [".:.", "T"], 2], ["Move", "N0", ["identifier:phone_number", 3, 54, 3, 66], 1], ["Insert", "N1", ["identifier:self", "T"], 0], ["Insert", "N1", [".:.", "T"], 1], ["Move", "N1", ["identifier:call_start", 3, 68, 3, 78], 2], ["Insert", ["attribute", 3, 47, 3, 66], ["identifier:self", "T"], 0]]
CrisisCleanup/wcicp-call-service@d788a7033bdd6991937b514f46f9a268376180c3
null
null
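The missing self. prefix fixed above is a common Python slip: inside a method, bare names are resolved as locals or globals, not as instance attributes, so the original __str__ raised NameError at runtime. A simplified stand-in for the Django model in the diff:

class Call:
    def __init__(self, phone_number, call_start):
        self.phone_number = phone_number
        self.call_start = call_start

    def __str__(self):
        # 'Call to/from {} at {}'.format(phone_number, call_start) would raise
        # NameError: name 'phone_number' is not defined
        return 'Call to/from {} at {}'.format(self.phone_number, self.call_start)

print(Call("+1-555-0100", "2016-03-01 12:00"))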
wechat_jump_game
a5cb10aa192e9092721b93f4320ff555f9af55ed
96b12a1d673ec6e9efddcd1e2014945fdf69a018
wechat_jump_auto.py
https://github.com/Suhesong/wechat_jump_game
true
false
true
@@ -199,7 +199,7 @@ def find_piece_and_board(im): board_x_sum = 0 board_x_c = 0 - for j in range(board_x_start, board_x_end): + for j in range(int(board_x_start), int(board_x_end)): pixel = im_pixel[j,i] # 修掉脑袋比下一个小格子还高的情况的 bug if abs(j - piece_x) < piece_body_width:
for j in range ( board_x_start , board_x_end ) : pixel = im_pixel [ j , i ] if abs ( j - piece_x ) < piece_body_width :
for j in range ( int ( board_x_start ) , int ( board_x_end ) ) : pixel = im_pixel [ j , i ] if abs ( j - piece_x ) < piece_body_width :
SINGLE_STMT
[["Insert", ["argument_list", 3, 23, 3, 51], ["call", "N0"], 1], ["Insert", ["argument_list", 3, 23, 3, 51], ["call", "N1"], 4], ["Insert", ["argument_list", 3, 23, 3, 51], ["):)", "T"], 5], ["Insert", "N0", ["identifier:int", "T"], 0], ["Insert", "N0", ["argument_list", "N2"], 1], ["Insert", "N1", ["identifier:int", "T"], 0], ["Insert", "N1", ["argument_list", "N3"], 1], ["Insert", "N2", ["(:(", "T"], 0], ["Move", "N2", ["identifier:board_x_start", 3, 24, 3, 37], 1], ["Move", "N2", ["):)", 3, 50, 3, 51], 2], ["Insert", "N3", ["(:(", "T"], 0], ["Move", "N3", ["identifier:board_x_end", 3, 39, 3, 50], 1], ["Insert", "N3", ["):)", "T"], 2]]
Suhesong/wechat_jump_game@a5cb10aa192e9092721b93f4320ff555f9af55ed
Fix error #501 Fix the error reported in https://github.com/wangshub/wechat_jump_game/issues/501
[ { "sha": "f49ced9761da2818fddcd4571655c6636245aa4a", "filename": "wechat_jump_auto.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/Suhesong/wechat_jump_game/blob/a5cb10aa192e9092721b93f4320ff555f9af55ed/wechat_jump_auto.py", "raw_url": "https://github.com/Suhesong/wechat_jump_game/raw/a5cb10aa192e9092721b93f4320ff555f9af55ed/wechat_jump_auto.py", "contents_url": "https://api.github.com/repos/Suhesong/wechat_jump_game/contents/wechat_jump_auto.py?ref=a5cb10aa192e9092721b93f4320ff555f9af55ed", "patch": "@@ -199,7 +199,7 @@ def find_piece_and_board(im):\n board_x_sum = 0\n board_x_c = 0\n \n- for j in range(board_x_start, board_x_end):\n+ for j in range(int(board_x_start), int(board_x_end)):\n pixel = im_pixel[j,i]\n # 修掉脑袋比下一个小格子还高的情况的 bug\n if abs(j - piece_x) < piece_body_width:" } ]
receding_planar_sys
c65c1ed91aaddedd8c9f4dae6ba1dd9e20e480bd
cb8704ca82e6d55b514b34606fcca7adcfe56f51
src/receding_controller.py
https://github.com/MattDerry/receding_planar_sys
true
false
true
@@ -297,7 +297,7 @@ class RecedingController: rospy.logwarn("Received an error from optimizer!") # publish optimization summary: od = OptimizationData(**optdat) - od.index = self.call_count + od.index = self.callback_count od.time = ttmp[0] self.opt_pub.publish(od) # store data:
od . index = self . call_count
od . index = self . callback_count
CHANGE_ATTRIBUTE_USED
[["Update", ["identifier:call_count", 3, 29, 3, 39], "callback_count"]]
MattDerry/receding_planar_sys@c65c1ed91aaddedd8c9f4dae6ba1dd9e20e480bd
Tiny mistake.
[ { "sha": "19bedbf81019b66461cbf7f47ce8e29b8f94491f", "filename": "src/receding_controller.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/MattDerry/receding_planar_sys/blob/c65c1ed91aaddedd8c9f4dae6ba1dd9e20e480bd/src%2Freceding_controller.py", "raw_url": "https://github.com/MattDerry/receding_planar_sys/raw/c65c1ed91aaddedd8c9f4dae6ba1dd9e20e480bd/src%2Freceding_controller.py", "contents_url": "https://api.github.com/repos/MattDerry/receding_planar_sys/contents/src%2Freceding_controller.py?ref=c65c1ed91aaddedd8c9f4dae6ba1dd9e20e480bd", "patch": "@@ -297,7 +297,7 @@ def meascb(self, data):\n rospy.logwarn(\"Received an error from optimizer!\")\n # publish optimization summary:\n od = OptimizationData(**optdat)\n- od.index = self.call_count\n+ od.index = self.callback_count\n od.time = ttmp[0]\n self.opt_pub.publish(od)\n # store data:" } ]
pcmdi_metrics
5ecc90285954e0f8befd77d366e0cb43746ee23f
162f90a505c9664acc80859471f52f557373a48f
src/python/wgne/scripts/wgne_metrics_driver.py
https://github.com/wk1984/pcmdi_metrics
true
false
false
@@ -18,7 +18,7 @@ import cdutil regions_values = {"land":100.,"ocean":0.,"lnd":100.,"ocn":0.} #Load the obs dictionary -obs_dic = json.loads(open(os.path.join(sys.prefix,"share","wgne","obs_info_dictionary.json")).read()) +obs_dic = json.loads(open(os.path.join(metrics.__path__[0],"..","..","..","..","share","wgne","obs_info_dictionary.json")).read()) class DUP(object): def __init__(self,outfile):
obs_dic = json . loads ( open ( os . path . join ( sys . prefix , "share" , "wgne" , "obs_info_dictionary.json" ) ) . read ( ) )
obs_dic = json . loads ( open ( os . path . join ( metrics . __path__ [ 0 ] , ".." , ".." , ".." , ".." , "share" , "wgne" , "obs_info_dictionary.json" ) ) . read ( ) )
SINGLE_STMT
[["Move", ["string:\"share\"", 3, 51, 3, 58], ["argument_list", 3, 39, 3, 93], 4], ["Move", ["string:\"wgne\"", 3, 59, 3, 65], ["argument_list", 3, 39, 3, 93], 3], ["Insert", ["argument_list", 3, 39, 3, 93], ["subscript", "N0"], 1], ["Insert", ["argument_list", 3, 39, 3, 93], ["string:\"..\"", "T"], 5], ["Insert", ["argument_list", 3, 39, 3, 93], ["string:\"..\"", "T"], 6], ["Insert", ["argument_list", 3, 39, 3, 93], [",:,", "T"], 7], ["Insert", ["argument_list", 3, 39, 3, 93], ["string:\"..\"", "T"], 8], ["Insert", ["argument_list", 3, 39, 3, 93], [",:,", "T"], 9], ["Insert", ["argument_list", 3, 39, 3, 93], ["string:\"..\"", "T"], 10], ["Insert", ["argument_list", 3, 39, 3, 93], [",:,", "T"], 11], ["Insert", ["argument_list", 3, 39, 3, 93], [",:,", "T"], 13], ["Insert", ["argument_list", 3, 39, 3, 93], [",:,", "T"], 15], ["Move", "N0", ["attribute", 3, 40, 3, 50], 0], ["Insert", "N0", ["[:[", "T"], 1], ["Insert", "N0", ["integer:0", "T"], 2], ["Insert", "N0", ["]:]", "T"], 3], ["Update", ["identifier:sys", 3, 40, 3, 43], "metrics"], ["Update", ["identifier:prefix", 3, 44, 3, 50], "__path__"], ["Delete", [",:,", 3, 50, 3, 51]]]
wk1984/pcmdi_metrics@5ecc90285954e0f8befd77d366e0cb43746ee23f
fixed issue where install with --prefix will end up in non standard path in mac
[ { "sha": "73d145be985c5e691c480430fa3503c85c0ef8bc", "filename": "src/python/wgne/scripts/wgne_metrics_driver.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/wk1984/pcmdi_metrics/blob/5ecc90285954e0f8befd77d366e0cb43746ee23f/src%2Fpython%2Fwgne%2Fscripts%2Fwgne_metrics_driver.py", "raw_url": "https://github.com/wk1984/pcmdi_metrics/raw/5ecc90285954e0f8befd77d366e0cb43746ee23f/src%2Fpython%2Fwgne%2Fscripts%2Fwgne_metrics_driver.py", "contents_url": "https://api.github.com/repos/wk1984/pcmdi_metrics/contents/src%2Fpython%2Fwgne%2Fscripts%2Fwgne_metrics_driver.py?ref=5ecc90285954e0f8befd77d366e0cb43746ee23f", "patch": "@@ -18,7 +18,7 @@\n regions_values = {\"land\":100.,\"ocean\":0.,\"lnd\":100.,\"ocn\":0.}\n \n #Load the obs dictionary\n-obs_dic = json.loads(open(os.path.join(sys.prefix,\"share\",\"wgne\",\"obs_info_dictionary.json\")).read())\n+obs_dic = json.loads(open(os.path.join(metrics.__path__[0],\"..\",\"..\",\"..\",\"..\",\"share\",\"wgne\",\"obs_info_dictionary.json\")).read())\n \n class DUP(object):\n def __init__(self,outfile):" } ]
awx
f6ebf80ebaaf5d6f04e9469093155297f085a286
392a7dab0d1c2e5decc73d96a0ab35e9d951efca
awx/main/access.py
https://github.com/gconsidine/awx
true
false
true
@@ -439,7 +439,7 @@ class GroupAccess(BaseAccess): model = Group def get_queryset(self): - qs = self.model.accessible_objects(self.user, 'read_role') + qs = Group.objects.filter(inventory__in=Inventory.accessible_objects(self.user, 'read_role')) qs = qs.select_related('created_by', 'modified_by', 'inventory') return qs.prefetch_related('parents', 'children', 'inventory_source').all()
qs = self . model . accessible_objects ( self . user , 'read_role' )
qs = Group . objects . filter ( inventory__in = Inventory . accessible_objects ( self . user , 'read_role' ) )
SINGLE_STMT
[["Insert", ["call", 3, 14, 3, 67], ["argument_list", "N0"], 1], ["Update", ["identifier:accessible_objects", 3, 25, 3, 43], "filter"], ["Insert", "N0", ["(:(", "T"], 0], ["Insert", "N0", ["keyword_argument", "N1"], 1], ["Insert", "N0", ["):)", "T"], 2], ["Update", ["identifier:self", 3, 14, 3, 18], "Group"], ["Update", ["identifier:model", 3, 19, 3, 24], "objects"], ["Insert", "N1", ["identifier:inventory__in", "T"], 0], ["Insert", "N1", ["=:=", "T"], 1], ["Insert", "N1", ["call", "N2"], 2], ["Insert", "N2", ["attribute", "N3"], 0], ["Move", "N2", ["argument_list", 3, 43, 3, 67], 1], ["Insert", "N3", ["identifier:Inventory", "T"], 0], ["Insert", "N3", [".:.", "T"], 1], ["Insert", "N3", ["identifier:accessible_objects", "T"], 2]]
gconsidine/awx@f6ebf80ebaaf5d6f04e9469093155297f085a286
Fixed Group queryset
[ { "sha": "ddc4fe0716554c1864f94794d0fc8fbc35122795", "filename": "awx/main/access.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/f6ebf80ebaaf5d6f04e9469093155297f085a286/awx%2Fmain%2Faccess.py", "raw_url": "https://github.com/gconsidine/awx/raw/f6ebf80ebaaf5d6f04e9469093155297f085a286/awx%2Fmain%2Faccess.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Faccess.py?ref=f6ebf80ebaaf5d6f04e9469093155297f085a286", "patch": "@@ -439,7 +439,7 @@ class GroupAccess(BaseAccess):\n model = Group\n \n def get_queryset(self):\n- qs = self.model.accessible_objects(self.user, 'read_role')\n+ qs = Group.objects.filter(inventory__in=Inventory.accessible_objects(self.user, 'read_role'))\n qs = qs.select_related('created_by', 'modified_by', 'inventory')\n return qs.prefetch_related('parents', 'children', 'inventory_source').all()\n " } ]
awx
3697ddc2d0ba6d1c6a0997ac6e71c18035267c1b
e1853372f7a939f1759a6ee79676e0b3446fc0c0
awx/main/signals.py
https://github.com/gconsidine/awx
true
false
true
@@ -167,7 +167,7 @@ def rbac_activity_stream(instance, sender, **kwargs): elif sender.__name__ == 'Role_parents': role = kwargs['model'].objects.filter(pk__in=kwargs['pk_set']).first() # don't record implicit creation / parents - if role.content_type is not None: + if role is not None and role.content_type is not None: parent = role.content_type.name + "." + role.role_field # Get the list of implicit parents that were defined at the class level. # We have to take this list from the class property to avoid including parents
if role . content_type is not None : parent = role . content_type . name + "." + role . role_field
if role is not None and role . content_type is not None : parent = role . content_type . name + "." + role . role_field
MORE_SPECIFIC_IF
[["Insert", ["if_statement", 3, 17, 6, 99], ["boolean_operator", "N0"], 1], ["Insert", "N0", ["comparison_operator", "N1"], 0], ["Insert", "N0", ["and:and", "T"], 1], ["Move", "N0", ["comparison_operator", 3, 20, 3, 49], 2], ["Insert", "N1", ["identifier:role", "T"], 0], ["Insert", "N1", ["is:is", "T"], 1], ["Insert", "N1", ["not:not", "T"], 2], ["Insert", "N1", ["none:None", "T"], 3]]
gconsidine/awx@3697ddc2d0ba6d1c6a0997ac6e71c18035267c1b
fixing Role is None exception
[ { "sha": "f171146c09248607ee6cca9d9683a2b0dd6f406c", "filename": "awx/main/signals.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/3697ddc2d0ba6d1c6a0997ac6e71c18035267c1b/awx%2Fmain%2Fsignals.py", "raw_url": "https://github.com/gconsidine/awx/raw/3697ddc2d0ba6d1c6a0997ac6e71c18035267c1b/awx%2Fmain%2Fsignals.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Fsignals.py?ref=3697ddc2d0ba6d1c6a0997ac6e71c18035267c1b", "patch": "@@ -167,7 +167,7 @@ def rbac_activity_stream(instance, sender, **kwargs):\n elif sender.__name__ == 'Role_parents':\n role = kwargs['model'].objects.filter(pk__in=kwargs['pk_set']).first()\n # don't record implicit creation / parents\n- if role.content_type is not None:\n+ if role is not None and role.content_type is not None:\n parent = role.content_type.name + \".\" + role.role_field\n # Get the list of implicit parents that were defined at the class level.\n # We have to take this list from the class property to avoid including parents" } ]
awx
80b044580d4344bdcaf6cccf7d4a8e4f3fa43214
6bc025c86ffb9dd894f02402ee3b7f3168e69f62
awx/main/tests/functional/test_notifications.py
https://github.com/gconsidine/awx
true
false
true
@@ -117,8 +117,8 @@ def test_notification_template_invalid_notification_type(patch, notification_tem def test_disallow_delete_when_notifications_pending(delete, user, notification_template): u = user('superuser', True) url = reverse('api:notification_template_detail', args=(notification_template.id,)) - n = Notification.objects.create(notification_template=notification_template, - status='pending') + Notification.objects.create(notification_template=notification_template, + status='pending') response = delete(url, user=u) assert response.status_code == 405
n = Notification . objects . create ( notification_template = notification_template , status = 'pending' )
Notification . objects . create ( notification_template = notification_template , status = 'pending' )
SINGLE_STMT
[["Move", ["expression_statement", 3, 5, 4, 54], ["call", 3, 9, 4, 54], 0], ["Delete", ["identifier:n", 3, 5, 3, 6]], ["Delete", ["=:=", 3, 7, 3, 8]], ["Delete", ["assignment", 3, 5, 4, 54]]]
gconsidine/awx@80b044580d4344bdcaf6cccf7d4a8e4f3fa43214
Fix up flake8 for notification tests
[ { "sha": "e5494edbeae1238056ebdc67fcbeaa669167a8b1", "filename": "awx/main/tests/functional/test_notifications.py", "status": "modified", "additions": 2, "deletions": 2, "changes": 4, "blob_url": "https://github.com/gconsidine/awx/blob/80b044580d4344bdcaf6cccf7d4a8e4f3fa43214/awx%2Fmain%2Ftests%2Ffunctional%2Ftest_notifications.py", "raw_url": "https://github.com/gconsidine/awx/raw/80b044580d4344bdcaf6cccf7d4a8e4f3fa43214/awx%2Fmain%2Ftests%2Ffunctional%2Ftest_notifications.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Ftests%2Ffunctional%2Ftest_notifications.py?ref=80b044580d4344bdcaf6cccf7d4a8e4f3fa43214", "patch": "@@ -117,8 +117,8 @@ def test_notification_template_invalid_notification_type(patch, notification_tem\n def test_disallow_delete_when_notifications_pending(delete, user, notification_template):\n u = user('superuser', True)\n url = reverse('api:notification_template_detail', args=(notification_template.id,))\n- n = Notification.objects.create(notification_template=notification_template,\n- status='pending')\n+ Notification.objects.create(notification_template=notification_template,\n+ status='pending')\n response = delete(url, user=u)\n assert response.status_code == 405\n " } ]
awx
8ad505c9cc498e6878a68376346684aa10d1669d
4f534122c52eab4bf817bf7486dbb62bab5d8eab
awx/api/views.py
https://github.com/gconsidine/awx
true
false
true
@@ -3462,7 +3462,7 @@ class NotificationTemplateDetail(RetrieveUpdateDestroyAPIView): if obj.notifications.filter(status='pending').exists(): return Response({"error": "Delete not allowed while there are pending notifications"}, status=status.HTTP_405_METHOD_NOT_ALLOWED) - return resp + return super(NotificationTemplateDetail, self).delete(request, *args, **kwargs) class NotificationTemplateTest(GenericAPIView):
return resp
return super ( NotificationTemplateDetail , self ) . delete ( request , * args , ** kwargs )
SINGLE_STMT
[["Insert", ["return_statement", 3, 9, 3, 20], ["call", "N0"], 1], ["Insert", "N0", ["attribute", "N1"], 0], ["Insert", "N0", ["argument_list", "N2"], 1], ["Insert", "N1", ["call", "N3"], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:delete", "T"], 2], ["Insert", "N2", ["(:(", "T"], 0], ["Insert", "N2", ["identifier:request", "T"], 1], ["Insert", "N2", [",:,", "T"], 2], ["Insert", "N2", ["list_splat", "N4"], 3], ["Insert", "N2", [",:,", "T"], 4], ["Insert", "N2", ["dictionary_splat", "N5"], 5], ["Insert", "N2", ["):)", "T"], 6], ["Update", ["identifier:resp", 3, 16, 3, 20], "super"], ["Move", "N3", ["identifier:resp", 3, 16, 3, 20], 0], ["Insert", "N3", ["argument_list", "N6"], 1], ["Insert", "N4", ["*:*", "T"], 0], ["Insert", "N4", ["identifier:args", "T"], 1], ["Insert", "N5", ["**:**", "T"], 0], ["Insert", "N5", ["identifier:kwargs", "T"], 1], ["Insert", "N6", ["(:(", "T"], 0], ["Insert", "N6", ["identifier:NotificationTemplateDetail", "T"], 1], ["Insert", "N6", [",:,", "T"], 2], ["Insert", "N6", ["identifier:self", "T"], 3], ["Insert", "N6", ["):)", "T"], 4]]
gconsidine/awx@8ad505c9cc498e6878a68376346684aa10d1669d
Fixed notification template deleting #2674
[ { "sha": "59f7adb745d7031170ac34bc7f225658f8f4584f", "filename": "awx/api/views.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/8ad505c9cc498e6878a68376346684aa10d1669d/awx%2Fapi%2Fviews.py", "raw_url": "https://github.com/gconsidine/awx/raw/8ad505c9cc498e6878a68376346684aa10d1669d/awx%2Fapi%2Fviews.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fapi%2Fviews.py?ref=8ad505c9cc498e6878a68376346684aa10d1669d", "patch": "@@ -3462,7 +3462,7 @@ def delete(self, request, *args, **kwargs):\n if obj.notifications.filter(status='pending').exists():\n return Response({\"error\": \"Delete not allowed while there are pending notifications\"},\n status=status.HTTP_405_METHOD_NOT_ALLOWED)\n- return resp\n+ return super(NotificationTemplateDetail, self).delete(request, *args, **kwargs)\n \n class NotificationTemplateTest(GenericAPIView):\n " } ]
awx
e818daa74f4ca9004176024c7ac042a570a7f10f
6f015e77c9c1eb043c62f965272b97592e4242b7
awx/main/models/rbac.py
https://github.com/gconsidine/awx
true
false
true
@@ -75,7 +75,7 @@ def check_singleton(func): if user in sys_admin or user in sys_audit: if len(args) == 2: return args[1] - return user.roles.all() + return Roles.objects.all() return func(*args, **kwargs) return wrapper
return user . roles . all ( )
return Roles . objects . all ( )
SINGLE_STMT
[["Update", ["identifier:user", 3, 20, 3, 24], "Roles"], ["Update", ["identifier:roles", 3, 25, 3, 30], "objects"]]
gconsidine/awx@e818daa74f4ca9004176024c7ac042a570a7f10f
Ensure system auditors/admins can see all roles Partial fix for #2744
[ { "sha": "633c60c1750649c59347a16c90b3e1b013fcde42", "filename": "awx/main/models/rbac.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/e818daa74f4ca9004176024c7ac042a570a7f10f/awx%2Fmain%2Fmodels%2Frbac.py", "raw_url": "https://github.com/gconsidine/awx/raw/e818daa74f4ca9004176024c7ac042a570a7f10f/awx%2Fmain%2Fmodels%2Frbac.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Fmodels%2Frbac.py?ref=e818daa74f4ca9004176024c7ac042a570a7f10f", "patch": "@@ -75,7 +75,7 @@ def wrapper(*args, **kwargs):\n if user in sys_admin or user in sys_audit:\n if len(args) == 2:\n return args[1]\n- return user.roles.all()\n+ return Roles.objects.all()\n return func(*args, **kwargs)\n return wrapper\n " } ]
awx
5df846eb0a60d38471bf354f86a47670ec4b87ff
ddbe54f841b03c4540af97fd3ccb449797dd72c1
awx/main/tests/functional/conftest.py
https://github.com/gconsidine/awx
true
false
true
@@ -158,7 +158,7 @@ def machine_credential(): return Credential.objects.create(name='machine-cred', kind='ssh', username='test_user', password='pas4word') @pytest.fixture -def org_credential(organization, credential): +def org_credential(organization): return Credential.objects.create(kind='aws', name='test-cred', organization=organization) @pytest.fixture
def org_credential ( organization , credential ) : return Credential . objects . create ( kind = 'aws' , name = 'test-cred' , organization = organization )
def org_credential ( organization ) : return Credential . objects . create ( kind = 'aws' , name = 'test-cred' , organization = organization )
SINGLE_STMT
[["Delete", [",:,", 3, 32, 3, 33]], ["Delete", ["identifier:credential", 3, 34, 3, 44]]]
gconsidine/awx@5df846eb0a60d38471bf354f86a47670ec4b87ff
remove unnecessary fixture from org_credential
[ { "sha": "076f368631f9aea8b80b68e322c609befa98159b", "filename": "awx/main/tests/functional/conftest.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/5df846eb0a60d38471bf354f86a47670ec4b87ff/awx%2Fmain%2Ftests%2Ffunctional%2Fconftest.py", "raw_url": "https://github.com/gconsidine/awx/raw/5df846eb0a60d38471bf354f86a47670ec4b87ff/awx%2Fmain%2Ftests%2Ffunctional%2Fconftest.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Ftests%2Ffunctional%2Fconftest.py?ref=5df846eb0a60d38471bf354f86a47670ec4b87ff", "patch": "@@ -158,7 +158,7 @@ def machine_credential():\n return Credential.objects.create(name='machine-cred', kind='ssh', username='test_user', password='pas4word')\n \n @pytest.fixture\n-def org_credential(organization, credential):\n+def org_credential(organization):\n return Credential.objects.create(kind='aws', name='test-cred', organization=organization)\n \n @pytest.fixture" } ]
awx
f75eba471ed30d35d09fc680f7bd5b2853c424df
9476bc12e9925f19f275af7d63e5889c4748a4f0
awx/api/serializers.py
https://github.com/gconsidine/awx
true
false
true
@@ -2429,7 +2429,7 @@ class NotificationTemplateSerializer(BaseSerializer): notification_type = self.instance.notification_type if not notification_type: raise serializers.ValidationError('Missing required fields for Notification Configuration: notification_type') - if not 'organization' in attrs: + if 'organization' not in attrs: raise serializers.ValidationError("Missing 'organization' from required fields") notification_class = NotificationTemplate.CLASS_FOR_NOTIFICATION_TYPE[notification_type]
if not 'organization' in attrs : raise serializers . ValidationError ( "Missing 'organization' from required fields" )
if 'organization' not in attrs : raise serializers . ValidationError ( "Missing 'organization' from required fields" )
SINGLE_STMT
[["Move", ["if_statement", 3, 9, 4, 93], ["comparison_operator", 3, 16, 3, 39], 1], ["Insert", ["comparison_operator", 3, 16, 3, 39], ["not:not", "T"], 1], ["Delete", ["not:not", 3, 12, 3, 15]], ["Delete", ["not_operator", 3, 12, 3, 39]]]
gconsidine/awx@f75eba471ed30d35d09fc680f7bd5b2853c424df
Fix flake8 'not in'
[ { "sha": "c122351a3275032c75c58a2b7a8aaf0f88d66d75", "filename": "awx/api/serializers.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/f75eba471ed30d35d09fc680f7bd5b2853c424df/awx%2Fapi%2Fserializers.py", "raw_url": "https://github.com/gconsidine/awx/raw/f75eba471ed30d35d09fc680f7bd5b2853c424df/awx%2Fapi%2Fserializers.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fapi%2Fserializers.py?ref=f75eba471ed30d35d09fc680f7bd5b2853c424df", "patch": "@@ -2429,7 +2429,7 @@ def validate(self, attrs):\n notification_type = self.instance.notification_type\n if not notification_type:\n raise serializers.ValidationError('Missing required fields for Notification Configuration: notification_type')\n- if not 'organization' in attrs:\n+ if 'organization' not in attrs:\n raise serializers.ValidationError(\"Missing 'organization' from required fields\")\n \n notification_class = NotificationTemplate.CLASS_FOR_NOTIFICATION_TYPE[notification_type]" } ]
awx
25c177cdcd71d78b3decd81577ce11430c48bff9
b6ec87f18a618f681439a60f6ea1762f0b57a16c
awx/api/views.py
https://github.com/gconsidine/awx
true
false
true
@@ -1040,7 +1040,7 @@ class ProjectActivityStreamList(SubListAPIView): return qs elif parent.credential is None: return qs.filter(project=parent) - return qs.filter(Q(project=parent) | Q(credential__in=parent.credential)) + return qs.filter(Q(project=parent) | Q(credential=parent.credential)) class ProjectNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView):
return qs . filter ( Q ( project = parent ) | Q ( credential__in = parent . credential ) )
return qs . filter ( Q ( project = parent ) | Q ( credential = parent . credential ) )
CHANGE_KEYWORD_ARGUMENT_USED
[["Update", ["identifier:credential__in", 3, 48, 3, 62], "credential"]]
gconsidine/awx@25c177cdcd71d78b3decd81577ce11430c48bff9
M2M fix for project activity stream
[ { "sha": "a9a2e805496d66f606f14342ba8a0a6736ddda88", "filename": "awx/api/views.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/25c177cdcd71d78b3decd81577ce11430c48bff9/awx%2Fapi%2Fviews.py", "raw_url": "https://github.com/gconsidine/awx/raw/25c177cdcd71d78b3decd81577ce11430c48bff9/awx%2Fapi%2Fviews.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fapi%2Fviews.py?ref=25c177cdcd71d78b3decd81577ce11430c48bff9", "patch": "@@ -1040,7 +1040,7 @@ def get_queryset(self):\n return qs\n elif parent.credential is None:\n return qs.filter(project=parent)\n- return qs.filter(Q(project=parent) | Q(credential__in=parent.credential))\n+ return qs.filter(Q(project=parent) | Q(credential=parent.credential))\n \n class ProjectNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView):\n " } ]
awx
43c5105a577dc0de221f53183c818785a24b3686
1e037098f227ff7df7127920f845f7d891940319
awx/main/tasks.py
https://github.com/gconsidine/awx
true
false
false
@@ -599,7 +599,7 @@ class BaseTask(Task): else: child_procs = main_proc.get_children(recursive=True) for child_proc in child_procs: - os.kill(child_proc.pid, signal.SIGTERM) + os.kill(child_proc.pid, signal.SIGKILL) except TypeError: os.kill(child.pid, signal.SIGKILL) else:
os . kill ( child_proc . pid , signal . SIGTERM )
os . kill ( child_proc . pid , signal . SIGKILL )
CHANGE_ATTRIBUTE_USED
[["Update", ["identifier:SIGTERM", 3, 68, 3, 75], "SIGKILL"]]
gconsidine/awx@43c5105a577dc0de221f53183c818785a24b3686
No more Mr. Nice Tower or how I learned to stop worrying and use SIGKILL. Ansible has a bug that could potentially leave tower's jobs hanging around indefinitely requiring manual shell intervention. I'm going to put this here and until Ansible can get to the bottom of it.
[ { "sha": "f4d442fbeb629662c77dc17a004ad799f44005f0", "filename": "awx/main/tasks.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/43c5105a577dc0de221f53183c818785a24b3686/awx%2Fmain%2Ftasks.py", "raw_url": "https://github.com/gconsidine/awx/raw/43c5105a577dc0de221f53183c818785a24b3686/awx%2Fmain%2Ftasks.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Ftasks.py?ref=43c5105a577dc0de221f53183c818785a24b3686", "patch": "@@ -599,7 +599,7 @@ def run_pexpect(self, instance, args, cwd, env, passwords, stdout_handle,\n else:\n child_procs = main_proc.get_children(recursive=True)\n for child_proc in child_procs:\n- os.kill(child_proc.pid, signal.SIGTERM)\n+ os.kill(child_proc.pid, signal.SIGKILL)\n except TypeError:\n os.kill(child.pid, signal.SIGKILL)\n else:" } ]
awx
df1c1ae4738f3ba0e8d3b8bce9e051569bd7b903
eb97b8c91646a55e360eb5f2f8ea800f2f5a1696
awx/main/tests/functional/api/test_job_template.py
https://github.com/gconsidine/awx
true
false
true
@@ -344,6 +344,6 @@ def test_disallow_template_delete_on_running_job(job_template_factory, delete, a project='p', inventory='i', organization='o') - j = objects.job_template.create_unified_job() + objects.job_template.create_unified_job() delete_response = delete(reverse('api:job_template_detail', args=[objects.job_template.pk]), user=admin_user) assert delete_response.status_code == 409
organization = 'o' ) j = objects . job_template . create_unified_job ( )
organization = 'o' ) objects . job_template . create_unified_job ( )
SINGLE_STMT
[["Update", ["identifier:j", 3, 5, 3, 6], "organization"], ["Delete", ["identifier:organization", 2, 36, 2, 48]], ["Delete", ["=:=", 2, 48, 2, 49]]]
gconsidine/awx@df1c1ae4738f3ba0e8d3b8bce9e051569bd7b903
Fix up flake8
[ { "sha": "cab2e537310facc32ddd15f122c6a4f6c14bf8a1", "filename": "awx/main/tests/functional/api/test_job_template.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/df1c1ae4738f3ba0e8d3b8bce9e051569bd7b903/awx%2Fmain%2Ftests%2Ffunctional%2Fapi%2Ftest_job_template.py", "raw_url": "https://github.com/gconsidine/awx/raw/df1c1ae4738f3ba0e8d3b8bce9e051569bd7b903/awx%2Fmain%2Ftests%2Ffunctional%2Fapi%2Ftest_job_template.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Ftests%2Ffunctional%2Fapi%2Ftest_job_template.py?ref=df1c1ae4738f3ba0e8d3b8bce9e051569bd7b903", "patch": "@@ -344,6 +344,6 @@ def test_disallow_template_delete_on_running_job(job_template_factory, delete, a\n project='p',\n inventory='i',\n organization='o')\n- j = objects.job_template.create_unified_job()\n+ objects.job_template.create_unified_job()\n delete_response = delete(reverse('api:job_template_detail', args=[objects.job_template.pk]), user=admin_user)\n assert delete_response.status_code == 409" } ]
awx
62562afd0d7b0ce14d18da25617a6057cdb5ec8f
df1c1ae4738f3ba0e8d3b8bce9e051569bd7b903
awx/sso/backends.py
https://github.com/gconsidine/awx
true
false
true
@@ -239,7 +239,7 @@ def on_populate_user(sender, **kwargs): team, created = Team.objects.get_or_create(name=team_name, organization=org) users_opts = team_opts.get('users', None) remove = bool(team_opts.get('remove', True)) - _update_m2m_from_groups(user, ldap_user, team.member_role.users, users_opts, + _update_m2m_from_groups(user, ldap_user, team.member_role.members, users_opts, remove) # Update user profile to store LDAP DN.
_update_m2m_from_groups ( user , ldap_user , team . member_role . users , users_opts , remove )
_update_m2m_from_groups ( user , ldap_user , team . member_role . members , users_opts , remove )
CHANGE_ATTRIBUTE_USED
[["Update", ["identifier:users", 3, 67, 3, 72], "members"]]
gconsidine/awx@62562afd0d7b0ce14d18da25617a6057cdb5ec8f
Fix for populating teams for LDAP user.
[ { "sha": "4f479b954f08f04618b5a7c0754a8e9948aadbc7", "filename": "awx/sso/backends.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/62562afd0d7b0ce14d18da25617a6057cdb5ec8f/awx%2Fsso%2Fbackends.py", "raw_url": "https://github.com/gconsidine/awx/raw/62562afd0d7b0ce14d18da25617a6057cdb5ec8f/awx%2Fsso%2Fbackends.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fsso%2Fbackends.py?ref=62562afd0d7b0ce14d18da25617a6057cdb5ec8f", "patch": "@@ -239,7 +239,7 @@ def on_populate_user(sender, **kwargs):\n team, created = Team.objects.get_or_create(name=team_name, organization=org)\n users_opts = team_opts.get('users', None)\n remove = bool(team_opts.get('remove', True))\n- _update_m2m_from_groups(user, ldap_user, team.member_role.users, users_opts,\n+ _update_m2m_from_groups(user, ldap_user, team.member_role.members, users_opts,\n remove)\n \n # Update user profile to store LDAP DN." } ]
awx
94052c2b2b1756c4c7e8d4753d45bbf5b1753b94
4b3f1d44def7f48bfab52c9e10491177b8f917b8
awx/main/migrations/_rbac.py
https://github.com/gconsidine/awx
true
false
true
@@ -159,7 +159,7 @@ def migrate_credential(apps, schema_editor): InventorySource = apps.get_model('main', 'InventorySource') for cred in Credential.objects.iterator(): - results = [x for x in JobTemplate.objects.filter(Q(credential=cred) | Q(cloud_credential=cred)).all()] + \ + results = [x for x in JobTemplate.objects.filter(Q(credential=cred) | Q(cloud_credential=cred), inventory__isnull=False).all()] + \ [x for x in InventorySource.objects.filter(credential=cred).all()] if cred.deprecated_team is not None and results: if len(results) == 1:
results = [ x for x in JobTemplate . objects . filter ( Q ( credential = cred ) | Q ( cloud_credential = cred ) ) . all ( ) ] + [ x for x in InventorySource . objects . filter ( credential = cred ) . all ( ) ]
results = [ x for x in JobTemplate . objects . filter ( Q ( credential = cred ) | Q ( cloud_credential = cred ) , inventory__isnull = False ) . all ( ) ] + [ x for x in InventorySource . objects . filter ( credential = cred ) . all ( ) ]
SAME_FUNCTION_MORE_ARGS
[["Insert", ["argument_list", 3, 57, 3, 104], [",:,", "T"], 2], ["Insert", ["argument_list", 3, 57, 3, 104], ["keyword_argument", "N0"], 3], ["Insert", "N0", ["identifier:inventory__isnull", "T"], 0], ["Insert", "N0", ["=:=", "T"], 1], ["Insert", "N0", ["false:False", "T"], 2]]
gconsidine/awx@94052c2b2b1756c4c7e8d4753d45bbf5b1753b94
Fixed credential migration issue involving null inventory fields in job templates Don't attempt to derive credential organization information from a job template when the inventory field is null for that job template #3107
[ { "sha": "b60ac65691c965f61c370b7b2ba5bdeeee76ac2c", "filename": "awx/main/migrations/_rbac.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/94052c2b2b1756c4c7e8d4753d45bbf5b1753b94/awx%2Fmain%2Fmigrations%2F_rbac.py", "raw_url": "https://github.com/gconsidine/awx/raw/94052c2b2b1756c4c7e8d4753d45bbf5b1753b94/awx%2Fmain%2Fmigrations%2F_rbac.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Fmigrations%2F_rbac.py?ref=94052c2b2b1756c4c7e8d4753d45bbf5b1753b94", "patch": "@@ -159,7 +159,7 @@ def migrate_credential(apps, schema_editor):\n InventorySource = apps.get_model('main', 'InventorySource')\n \n for cred in Credential.objects.iterator():\n- results = [x for x in JobTemplate.objects.filter(Q(credential=cred) | Q(cloud_credential=cred)).all()] + \\\n+ results = [x for x in JobTemplate.objects.filter(Q(credential=cred) | Q(cloud_credential=cred), inventory__isnull=False).all()] + \\\n [x for x in InventorySource.objects.filter(credential=cred).all()]\n if cred.deprecated_team is not None and results:\n if len(results) == 1:" } ]