method does not return the latest record - boto

In the following example I can return the record if I already know its sequence number, which was returned when I inserted the data.
But how do I return all the data that has been added by different devices?
import boto

mykin = boto.connect_kinesis(aws_access_key_id='access_key',
                             aws_secret_access_key='secret_key')
myput = mykin.put_record(stream_name='mytest', data='abcdefghij',
                         partition_key='parti11', b64_encode=True)
myiterator = mykin.get_shard_iterator(stream_name='mytest',
                                      shard_id='shardId-000000000000',
                                      shard_iterator_type='AT_SEQUENCE_NUMBER',
                                      starting_sequence_number=myput['SequenceNumber'])
mykin.get_records(shard_iterator=myiterator['ShardIterator'])

In your consumer:
import boto.kinesis

kinesis = boto.kinesis.connect_to_region('us-east-1',
                                         aws_access_key_id=aws_access_key_id,
                                         aws_secret_access_key=aws_secret_access_key)
response = kinesis.describe_stream('stream_name')
shard_id = response['StreamDescription']['Shards'][0]['ShardId']
response = kinesis.get_shard_iterator('stream_name', shard_id, 'TRIM_HORIZON')
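A TRIM_HORIZON iterator starts at the oldest record still available in the shard, so records written by every producer on that shard will come back. A minimal polling loop (a sketch only, reusing the connection and iterator from the snippet above):

import time

shard_iterator = response['ShardIterator']
while shard_iterator:
    result = kinesis.get_records(shard_iterator, limit=100)
    for record in result['Records']:
        print(record['Data'])              # data from all producers on this shard
    shard_iterator = result['NextShardIterator']
    time.sleep(1)                          # stay under the per-shard read limits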

Cloud Function: Adding a Date Field to Table

I'm using a Google Cloud Function to create a table, and for now everything works the way it is supposed to.
But I would like to add a new field to the table, one that shows the time of its creation.
This is an example of the code that I'm using at the moment.
I don't know why it is not working, but my main goal is to do it with one table first and then replicate the process in the code where I handle two or more tables.
Example:
Structure of the data in the bucket:
Function code:
main:
from google.cloud import bigquery
import pandas as pd
from previsional_tables import table_test1

creation_date = pd.Timestamp.now()  # Here is where I'm supposed to get the date.

def main_function(event, context):
    dataset = 'bd_clients'
    file = event
    input_bucket_name = file['bucket']
    path_file = file['name']
    uri = 'gs://{}/{}'.format(input_bucket_name, path_file)
    path_file_list = path_file.split("/")
    file_name_ext = path_file_list[len(path_file_list) - 1]
    file_name_ext_list = file_name_ext.split(".")
    name_file = file_name_ext_list[0]
    print('file name ==> ' + name_file.upper())
    print('Getting the data from bucket "{}"'.format(uri))
    path_file_name = str(uri)
    print("path: ", path_file_name)
    if "gs://bucket_test" in path_file_name:
        client = bigquery.Client()
        job_config = bigquery.LoadJobConfig()
        table_test1(dataset, client, uri, job_config, bigquery)
tables:
def table_test1(dataset, client, uri, job_config, bigquery):
    table = "test1"
    dataset_ref = client.dataset(dataset)
    job_config.autodetect = True
    job_config.max_bad_records = 1000
    job_config.schema = [
        # create each column in BigQuery along with its type
        bigquery.SchemaField("NAME", "STRING"),
        bigquery.SchemaField("LAST_NAME", "STRING"),
        bigquery.SchemaField("ADDRESS", "STRING"),
        bigquery.SchemaField("DATE", bigquery.enums.SqlTypeNames.DATE),
    ]
    job_config.source_format = bigquery.SourceFormat.CSV
    job_config.field_delimiter = ';'
    job_config.write_disposition = bigquery.WriteDisposition.WRITE_APPEND
    load_job = client.load_table_from_uri(uri, dataset_ref.table(table), job_config=job_config)
requirements:
# Function dependencies, for example:
# package>=version
google-cloud-bigquery==2.25.1
pysftp==0.2.9
pandas==1.4.2
fsspec==2022.5.0
gcsfs==2022.5.0
Output structure in Database:
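One way to actually populate a creation-time field (a sketch only, not the asker's working code): read the CSV into pandas, append the date column, and load the DataFrame with load_table_from_dataframe instead of load_table_from_uri. The column name CREATION_DATE and the helper name are assumptions, and loading a DataFrame this way also requires pyarrow to be installed.

import pandas as pd
from google.cloud import bigquery

def load_with_creation_date(client, uri, dataset, table):
    # Read the file straight from GCS (gcsfs is already in requirements).
    df = pd.read_csv(uri, sep=';')
    # Add the new DATE field with the moment the rows are loaded.
    df['CREATION_DATE'] = pd.Timestamp.now().date()
    job_config = bigquery.LoadJobConfig(
        write_disposition=bigquery.WriteDisposition.WRITE_APPEND,
    )
    table_ref = client.dataset(dataset).table(table)
    job = client.load_table_from_dataframe(df, table_ref, job_config=job_config)
    job.result()  # wait for the load job to finish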

Discord.PY JSON

So I was working on something and weird things are happening. My amounts.json is resetting, and the code isn't adding to amounts.json (e.g. I type the command that should increase the number, and nothing changes in amounts.json).
Code:
@bot.command(pass_context=True)
async def redeem(ctx, key):
    with open('amounts.json') as f:
        amounts = json.load(f)
    id = int(ctx.message.author.id)
    if key in amounts:
        if amounts[key] < 2:
            await ctx.send("You have been given Buyer role!")
            amounts[key] += 1
            member = ctx.message.author
            this_guild = member.guild
            role = get(member.guild.roles, name='Buyer')
            await member.add_roles(role)
            Message = ctx.message
            await Message.delete()
            await ctx.send("You have been given Buyer role!")
            _save()
        else:
            await ctx.send("Invalid Key!")
    else:
        await ctx.send("Invalid Key!")
amounts.json:
{"196430670": 0}
You are never writing the edited amounts dict back to amounts.json, so the file keeps its old contents.
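A minimal sketch of what the missing write-back could look like. The original _save helper isn't shown, so the parameter here is an assumption; the point is simply to dump the edited dict back to disk:

import json

def _save(amounts):
    # Persist the edited dict so the change survives the command.
    with open('amounts.json', 'w') as f:
        json.dump(amounts, f, indent=4)

Then, inside redeem, call _save(amounts) after amounts[key] += 1.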

Object.get() is not iterable

I have this view:
def view_involved_people(request):
    schedule = request.POST['schedule']
    query = Schedule.objects.get(pk=schedule)
    serialized = serializers.serialize('json', query)
    data = {'people': serialized}
    return JsonResponse(data)
It raises an error saying that the object is not iterable. I think it is because I am only getting one instance of the object. However, how can I prevent this error and get this data from the view?
I have tried using .filter(), but when I call data.attribute_name it does not display the value.
You have to use filter() in your case, because serializers.serialize() expects an iterable such as a queryset, not a single model instance:
def view_involved_people(request):
    schedule = request.POST['schedule']
    query = Schedule.objects.filter(pk=schedule)
    serialized = serializers.serialize('json', query)
    data = {'people': serialized}
    return JsonResponse(data)
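Alternatively (a sketch, not part of the original answer), you can keep .get() and simply wrap the single instance in a list, since serialize() only needs something it can iterate over:

def view_involved_people(request):
    schedule = request.POST['schedule']
    instance = Schedule.objects.get(pk=schedule)             # single object
    serialized = serializers.serialize('json', [instance])   # wrapped so it is iterable
    return JsonResponse({'people': serialized})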

Use HttpResponse with JSON data in this code

This code seems to work fine when I run it in the Django console and just print the result.
reference = FloodHazard.objects.filter(hazard='High')
ids = reference.values_list('id', flat=True)

for myid in ids:
    getgeom = FloodHazard.objects.get(id=myid).geom
    response = BuildingStructure.objects.filter(geom__intersects=getgeom).values(
        'brgy_locat').annotate(counthigh=Count('brgy_locat'))
    print response
I was able to print all the values, but when using HttpResponse it returns an empty set. What is the proper way of returning JSON data from a queryset? So far I have tried this:
reference = FloodHazard.objects.filter(hazard='High')
ids = reference.values_list('id', flat=True)

response = {}
for myid in ids:
    getgeom = FloodHazard.objects.get(id=myid).geom
    response['high'] = BuildingStructure.objects.filter(geom__intersects=getgeom).values(
        'brgy_locat').annotate(counthigh=Count('brgy_locat'))

json_post = ujson.dumps(list(response))
return HttpResponse(json_post, content_type='application/json')
Your code does not make much sense as written: you assign every queryset to the same single key of the response dict, so each iteration overwrites the previous one. You should use a list for this purpose.
As far as I understand, the code should be something like this:
response = []
for myid in ids:
    getgeom = FloodHazard.objects.get(id=myid).geom
    response.extend(BuildingStructure.objects.filter(geom__intersects=getgeom)
                    .values('brgy_locat')
                    .annotate(counthigh=Count('brgy_locat')))

json_post = ujson.dumps(response)
If you want to return the hazard level as well as the list of buildings, you can return a dict:
json_post = ujson.dumps({'hazard': 'high', 'buildings': response})
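Put together as a complete view (a sketch, assuming the same models and imports as the question; the view name is illustrative):

def high_hazard_buildings(request):
    buildings = []
    for myid in FloodHazard.objects.filter(hazard='High').values_list('id', flat=True):
        getgeom = FloodHazard.objects.get(id=myid).geom
        buildings.extend(BuildingStructure.objects.filter(geom__intersects=getgeom)
                         .values('brgy_locat')
                         .annotate(counthigh=Count('brgy_locat')))
    json_post = ujson.dumps({'hazard': 'high', 'buildings': buildings})
    return HttpResponse(json_post, content_type='application/json')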

Django TypeError not JSON serializable in request.session

I have a sort function on a project I'm working on, where users can create a sort query of all the assets they're working on. When they get the results of their query, I want them to be able to download a .csv of all the objects in the query.
However, when I try to store the query results in a session, I get an error that the results are not JSON serializable. If I don't try to store the query results, the sort runs fine, but then the export button won't work since the query results haven't been stored.
In my views:
def sort(request, project_id=1):
    thisuser = request.user
    project = Project.objects.get(id=project_id)
    if Project.objects.filter(Q(created_by=thisuser) | Q(access__give_access_to=thisuser), id=project_id).exists():
        permission = 1
    else:
        permission = None
    if Asset.objects.filter(project__id=project_id, unique_id=1):
        assets = 1
    else:
        assets = None
    if request.POST:
        if request.POST.get('date_start') and request.POST.get('date_end'):
            date_start = datetime.strptime(request.POST['date_start'], '%m/%d/%Y')
            date_end = datetime.strptime(request.POST['date_end'], '%m/%d/%Y')
            q_date = Q(date_produced__range=[date_start, date_end])
        else:
            q_date = Q(date_produced__isnull=False) | Q(date_produced__isnull=True)
        text_fields = {
            'asset_type': request.POST.get('asset_type'),
            'description': request.POST.get('description'),
            'master_status': request.POST.get('master_status'),
            'location': request.POST.get('location'),
            'file_location': request.POST.get('file_location'),
            'footage_format': request.POST.get('footage_format'),
            'footage_region': request.POST.get('footage_region'),
            'footage_type': request.POST.get('footage_type'),
            'footage_fps': request.POST.get('footage_fps'),
            'footage_url': request.POST.get('footage_url'),
            'stills_credit': request.POST.get('stills_credit'),
            'stills_url': request.POST.get('stills_url'),
            'music_format': request.POST.get('music_format'),
            'music_credit': request.POST.get('music_credit'),
            'music_url': request.POST.get('music_url'),
            'license_type': request.POST.get('license_type'),
            'source': request.POST.get('source'),
            'source_contact': request.POST.get('source_contact'),
            'source_email': request.POST.get('source_email'),
            'source_id': request.POST.get('source_id'),
            'source_phone': request.POST.get('source_phone'),
            'source_fax': request.POST.get('source_fax'),
            'source_address': request.POST.get('source_address'),
            'credit_language': request.POST.get('source_language'),
            'cost': request.POST.get('cost'),
            'cost_model': request.POST.get('cost_model'),
            'total_cost': request.POST.get('total_cost'),
            'notes': request.POST.get('notes')
        }
        boolean_fields = {
            'used_in_film': request.POST.get('used_in_film'),
            'footage_blackandwhite': request.POST.get('footage_blackandwhite'),
            'footage_color': request.POST.get('footage_color'),
            'footage_sepia': request.POST.get('footage_sepia'),
            'stills_blackandwhite': request.POST.get('stills_blackandwhite'),
            'stills_color': request.POST.get('stills_color'),
            'stills_sepia': request.POST.get('stills_sepia'),
            'license_obtained': request.POST.get('license_obtained')
        }
        q_objects = Q()
        for field, value in text_fields.iteritems():
            if value:
                q_objects = Q(**{field + '__contains': value})
        q_boolean = Q()
        for field, value in boolean_fields.iteritems():
            if value:
                q_boolean |= Q(**{field: True})
        query_results = Asset.objects.filter(q_date, q_objects, q_boolean)
        list(query_results)
        request.session['query_results'] = list(query_results)
        args = {'query_results': query_results, 'thisuser': thisuser, 'project': project, 'assets': assets}
        args.update(csrf(request))
        args['query_results'] = query_results
        return render_to_response('sort_results.html', args)
    else:
        args = {'thisuser': thisuser, 'project': project, 'assets': assets}
        args.update(csrf(request))
        return render_to_response('sort.html', args)
This is the line that causes it to fail: request.session['query_results'] = list(query_results). It also fails with request.session['query_results'] = query_results.
The reason for this error is that you are trying to assign a list of model instances to the session, and a model instance cannot be serialized to JSON. If you want to pass a list of Asset records to the session, you can do it this way:
query_results = Asset.objects.values('id', 'name').filter(q_date, q_objects, q_boolean)
You can list the necessary model fields in values().
In that case you will have a list of dictionaries, not instances, and that list can be assigned to the session. But you cannot work with these dictionaries like instances of the Asset class, i.e. you cannot call model methods on them and so on.
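For the CSV export itself, a common follow-up (a sketch, not part of the original answer) is to store only the primary keys in the session, for example request.session['query_result_ids'] = list(query_results.values_list('id', flat=True)), and rebuild the queryset when the export is requested. The view name, session key and CSV columns below are illustrative assumptions:

import csv
from django.http import HttpResponse

def export_csv(request):
    ids = request.session.get('query_result_ids', [])
    assets = Asset.objects.filter(id__in=ids)   # full model instances again
    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename="assets.csv"'
    writer = csv.writer(response)
    writer.writerow(['id', 'asset_type', 'description'])  # assumed columns
    for asset in assets:
        writer.writerow([asset.id, asset.asset_type, asset.description])
    return response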