Commit ebf85bff authored by Sheetal Kashid's avatar Sheetal Kashid

Merge branch 'master' of https://github.com/gnowledge/gstudio

parents b97aa355 bce4e33a
from gnowsys_ndf.ndf.models import node_collection
from gnowsys_ndf.ndf.views.methods import delete_node

# One-off maintenance script: find duplicate Author instances (more than one
# Author node sharing the same `created_by`), keep the OLDEST instance per
# user and delete the newer duplicates.

# Group Author nodes by `created_by`; keep only groups with duplicates.
ag = node_collection.collection.aggregate([
    {'$match': {'_type': 'Author'}},
    {'$group': {
        '_id': {'created_by': '$created_by'},
        'objs': {'$push': '$$CURRENT'},
        'count': {'$sum': 1},
    }},
    {'$match': {'count': {'$gt': 1}}},
])

# Flatten every duplicate Author document into one list.
j = [x for sl in (i.get('objs') for i in ag['result']) for x in sl]

# auth_dec maps created_by -> {
#     'old': {created_at: _id},        # single oldest instance (to keep)
#     'new': {created_at: _id, ...},   # newer duplicates (to delete)
# }
auth_dec = {}
for i in j:
    user_id = i['created_by']
    if user_id not in auth_dec:
        # First instance seen for this user: provisionally the oldest.
        # (Explicit membership test replaces the original bare `except`,
        # which also swallowed the AttributeError from the non-existent
        # dict.extends() and silently reset the user's entry.)
        auth_dec[user_id] = {'old': {i['created_at']: i['_id']}, 'new': {}}
    elif i['created_at'] < list(auth_dec[user_id]['old'])[0]:
        # Found an even older instance: demote the previous "old" entry
        # into the deletion set.  (dict has no .extends(); use .update().)
        auth_dec[user_id]['new'].update(auth_dec[user_id]['old'])
        auth_dec[user_id]['old'] = {i['created_at']: i['_id']}
    else:
        auth_dec[user_id]['new'][i['created_at']] = i['_id']

# Delete the newer duplicates, keeping the oldest Author per user.
# (Original passed the undefined name `each_node_to_del` -> NameError.)
for each_node_to_del_id in [
        oid
        for new_map in (d['new'] for d in auth_dec.values())
        for oid in new_map.values()]:
    delete_node(each_node_to_del_id, deletion_type=1)

# Pending:
# - check for grelation `profile_pic` and other to take decision of which object to keep
\ No newline at end of file
......@@ -103,6 +103,7 @@ media urls in nested dictionary. Following is schema of if\_file:
- "collection\_set": List of resource names to organize resources.
- "post\_node": N/A
- "prior\_node": List of pre-requisites of concern resource.
- "relation\_set": Objects relations in a key value pair form. Value can be single ObjectId or list of ObjectId.
- curricular: Boolean value, either true or false. Specifies whether the
resource is curricular or not.
......@@ -126,25 +127,31 @@ media urls in nested dictionary. Following is schema of if\_file:
--------------
**Example API calls:** Examples can be used live on nroer.gov.in.
List possible query keys: - http://nroer.gov.in/api/v1
List possible values for query keys - http://nroer.gov.in/api/v1/tags -
http://nroer.gov.in/api/v1/status
List json objects with various key and value combination: -
http://nroer.gov.in/api/v1?tags=creative -
http://nroer.gov.in/api/v1?resource\_type=File&tags=creative -
http://nroer.gov.in/api/v1?workspace=home&resource\_type=File&tag=creative
-
http://nroer.gov.in/api/v1/?workspace=home&resource\_type=File&created\_by=nroer\_team&educationaluse=Images
-
http://nroer.gov.in/api/v1/?workspace=home&resource\_type=File&educationalsubject=history
-
http://nroer.gov.in/api/v1/?workspace=home&resource\_type=File&source=CIET,%20NCERT
-
http://nroer.gov.in/api/v1/?workspace=home&resource\_type=File&created\_by=nroer\_team&legal.copyright=CC-BY-SA%204.0%20unported&interactivitytype=expositive&educationaluse=image
**Example API calls:** Examples can be used on live nroer.gov.in server.
List possible query keys:
- http://nroer.gov.in/api/v1
List possible values for query keys
- http://nroer.gov.in/api/v1/tags
- http://nroer.gov.in/api/v1/status
List json objects with various key and value combination:
- http://nroer.gov.in/api/v1?tags=creative
- http://nroer.gov.in/api/v1?resource\_type=File&tags=creative
- http://nroer.gov.in/api/v1?workspace=home&resource\_type=File&tag=creative
- http://nroer.gov.in/api/v1/?workspace=home&resource\_type=File&created\_by=nroer\_team&educationaluse=Images
- http://nroer.gov.in/api/v1/?workspace=home&resource\_type=File&educationalsubject=history
- http://nroer.gov.in/api/v1/?workspace=home&resource\_type=File&source=CIET,%20NCERT
- http://nroer.gov.in/api/v1/?workspace=home&resource\_type=File&created\_by=nroer\_team&legal.copyright=CC-BY-SA%204.0%20unported&interactivitytype=expositive&educationaluse=image
Relation support:
- Check for field `relation_set` in every object. It contains dict of relation-name (key) and ObjectId or [ObjectId] as value.
- We can derive full object with `//<domain>/api/v1?_id=<ObjectId>` API call.
- Example: To list out the topic ("Patterns and Symmetry"): https://nroer.gov.in/api/v1?resource_type=Topic&name=Patterns%20and%20Symmetry
- To get detail about one object of taught_by: https://nroer.gov.in/api/v1?_id=5af1810816b51c01e3736a5c
- NOTE: Get additional/complete details of object: https://nroer.gov.in/dev/query/5af1810816b51c01e3736a5c/nbh
If you need more specific help, contact the developers on the mailing
list, or file an issue at the git repo.
\ No newline at end of file
# Data Collection
# Data Collection [Complete]
All the CLIx schools are running either of the platforms:
[1]. gStudio
......@@ -17,7 +17,8 @@ Year (YYYY) [ LEVEL 0 ]
- db [ LEVEL 4 ]
- media [ LEVEL 4 ]
- rcs-repo [ LEVEL 4 ]
- pgdump-all.sql [ LEVEL 4 ]
- postgres-dump [ LEVEL 4 ]
- pg_dump_all.sql [ LEVEL 5 ]
- system-heartbeat [ LEVEL 4 ]
- gstudio-exported-users-analytics-csvs [ LEVEL 4 ]
- gstudio_tools_logs [ LEVEL 4 ]
......@@ -64,7 +65,7 @@ we have a provision to collect data from the said platforms and both may reside
- `db`: mongoDB data *(gStudio + qbank)*.
- `media`: Files uploaded on clixserver *(gStudio)*.
- `rcs-repo`: rcs, versioned json files *(gStudio)*.
- `pgdump-all.sql`: Postgres DB dump.
- `postgres-dump`: Postgres DB dump folder containing file `pg_dump_all.sql`. This is dump of user data.
- `system-heartbeat`: Contains log of following:
- Server Id
- School Id
......@@ -89,7 +90,6 @@ we have a provision to collect data from the said platforms and both may reside
- `studentResponseFiles`: User uploaded files in assessments e.g: recorded-audio, images etc.
- `nginx-logs`: Contains logs produced by nginx.
---
**EXAMPLE STRUCTURE**:
```
......@@ -102,7 +102,8 @@ Example-data-collection-dir-str/
│ │ ├── db
│ │ ├── media
│ │ ├── rcs-repo
│ │ ├── pgdump-all.sql
│ │ ├── postgres-dump
│ │ | └── pg_dump_all.sql
│ | ├── gstudio-exported-users-analytics-csvs
│ | ├── gstudio_tools_logs
│ | ├── gstudio-logs
......@@ -121,7 +122,8 @@ Example-data-collection-dir-str/
│ ├── db
│ ├── media
│ ├── rcs-repo
│ ├── pgdump-all.sql
│ ├── postgres-dump
│ | └── pg_dump_all.sql
│ ├── gstudio-exported-users-analytics-csvs
│ ├── gstudio_tools_logs
│ ├── gstudio-logs
......@@ -135,3 +137,53 @@ Example-data-collection-dir-str/
│ └── studentResponseFiles
└── unplatform
```
---
## Derived Data
Derived data is data that can be extracted from the complete data above by running scripts on it.
### 1. Activity Timestamp [format: CSV]
1.1 For **Single User**:
- Script Name: `activity_timestamp`
- Prerequisite:
- Get in docker (ref. Utility 1)
- Reach at required gstudio location within docker (ref. Utility 2)
- [ RUN ]: `python manage.py activity_timestamp <username>`
- Example [ RUN ]: `python manage.py activity_timestamp green-apple-sp100`
1.2 For **all users**:
- Script Name: `get_all_users_activity_timestamp_csvs.py`
- Prerequisite:
- Get in docker (ref. Utility 1)
- Reach at required gstudio location within docker (ref. Utility 2)
- Get into project shell (ref. Utility 3)
- [ RUN ]: `execfile('../doc/deployer/get_all_users_activity_timestamp_csvs.py')`
### 2. Progress CSVs [format: CSV]
- Script name: `export_users_analytics`
- Prerequisite:
- Get in docker (ref. Utility 1)
- Reach at required gstudio location within docker (ref. Utility 2)
- [ RUN ]: `python manage.py export_users_analytics`
### 3. Assessments data [format: DB data dumps]
- Script Name: `single_school_get_MIT_activity_data.py`
- Prerequisite:
- Get in docker (ref. Utility 1)
- Reach at required gstudio location within docker (ref. Utility 4)
- [ RUN ]: `python single_school_get_MIT_activity_data.py`
---
### Utility Commands:
#### 1. Get into docker [ RUN ]:
- `docker exec -it gstudio bash`
#### 2. Reach at required `gstudio` location within docker [ RUN ]:
- `cd /home/docker/code/gstudio/gnowsys-ndf`
#### 3. Get into project shell [ RUN ]:
- `python manage.py shell`
#### 4. Reach at required `qbank-gstudio-scripts` location within docker [ RUN ]:
- `cd qbank-gstudio-scripts/single_school_get_MIT_activity_data/`
\ No newline at end of file
......@@ -107,4 +107,4 @@ def main():
if __name__ == '__main__':
main()
\ No newline at end of file
main()
......@@ -204,7 +204,7 @@ class Filehive(DjangoDocument):
file_blob.seek(0, os.SEEK_END)
file_size = file_blob.tell()
file_blob.seek(0)
except Exception, e:
except Exception as e:
print "Exception in calculating file_size: ", e
file_size = 0
......@@ -218,7 +218,7 @@ class Filehive(DjangoDocument):
else:
try:
image_dimension_tuple = get_image_dimensions(file_blob)
except Exception, e:
except Exception as e:
print "Exception in calculating file dimensions: ", e
pass
......@@ -306,7 +306,7 @@ class Filehive(DjangoDocument):
try:
img = Image.open(StringIO(files.read()))
except Exception, e:
except Exception as e:
print "Exception in opening file with PIL.Image.Open(): ", e
return None, None
......@@ -343,7 +343,7 @@ class Filehive(DjangoDocument):
return mid_size_img, img_size
except Exception, e:
except Exception as e:
print "Exception in converting image to mid size: ", e
return None
......
......@@ -216,7 +216,7 @@ class GSystem(Node):
try:
if md5_or_relurl:
file_blob = gfs.open(md5_or_relurl)
except Exception, e:
except Exception as e:
print "File '", md5_or_relurl, "' not found: ", e
return file_blob
......
......@@ -292,5 +292,5 @@ class HistoryManager():
print "\nDeleted RCS json version file : ", version_file_path
os.remove(json_file_path)
print "\nDeleted RCS json file : ", json_file_path
except Exception, e:
except Exception as e:
print "\nException occured while deleting RCS file for node '", node_obj._id.__str__(), "' : ", e
......@@ -6,7 +6,7 @@ class NodeJSONEncoder(json.JSONEncoder):
return str(o)
if isinstance(o, datetime.datetime):
return o.strftime("%d/%m/%Y %H:%M:%S")
return o.strftime("%d/%m/%Y %H:%M:%S:%f")
return json.JSONEncoder.default(self, o)
......@@ -134,7 +134,7 @@ class node_holder(DjangoDocument):
# try:
# active_loggedin_and_buddy_users_group = DjangoGroup.objects.get_or_create(name=django_active_users_group_name)[0]
# except Exception, e:
# except Exception as e:
# print e
# pass
......
......@@ -571,7 +571,7 @@ class Node(DjangoDocument):
# print "=== removed ", each_invalid_field, ' : ',
except Exception, e:
except Exception as e:
print e
pass
......@@ -615,7 +615,7 @@ class Node(DjangoDocument):
for each_invalid_field in invalid_fields:
if each_invalid_field in self_keys:
self.pop(each_invalid_field)
except Exception, e:
except Exception as e:
print "\nError while processing invalid fields: ", e
pass
......
......@@ -432,7 +432,7 @@ var animalList = [
},
{
id: 58,
name: 'Honeybee',
name: 'honeybee',
svg: `<svg xmlns:svg="http://www.w3.org/2000/svg" xmlns="http://www.w3.org/2000/svg" width="100" height="100" viewBox="0 0 100 100" version="1.1">
<g transform="translate(-721.42857,-650.93361)">
<path
......@@ -500,7 +500,7 @@ var animalList = [
},
{
id: 59,
name: 'Lobster',
name: 'lobster',
svg: `<svg xmlns:svg="http://www.w3.org/2000/svg" xmlns="http://www.w3.org/2000/svg" width="100" height="100" viewBox="0 0 100 100" version="1.1">
<g transform="translate(-721.42857,-650.93361)">
<path
......@@ -679,7 +679,7 @@ var animalList = [
},
{
id: 60,
name: 'Scorpion',
name: 'scorpion',
svg: `<svg xmlns:svg="http://www.w3.org/2000/svg" xmlns="http://www.w3.org/2000/svg" width="100" height="100" viewBox="0 0 100 100" version="1.1">
<g transform="translate(-721.42857,-650.93361)">
<path
......@@ -724,7 +724,7 @@ var animalList = [
},
{
id: 61,
name: 'Sheep',
name: 'sheep',
svg: `<svg xmlns:svg="http://www.w3.org/2000/svg" xmlns="http://www.w3.org/2000/svg" width="100" height="100" viewBox="0 0 100 100" version="1.1">
<g transform="translate(-721.42857,-650.93361)">
<path
......@@ -758,7 +758,7 @@ var animalList = [
},
{
id: 62,
name: 'Squid',
name: 'squid',
svg: `<svg xmlns:svg="http://www.w3.org/2000/svg" xmlns="http://www.w3.org/2000/svg" width="100" height="100" viewBox="0 0 100 100" version="1.1">
<g transform="translate(-721.42857,-650.93361)">
<path
......
......@@ -232,6 +232,9 @@
for (var i = 0; i != iframes.length; ++i) {
var match = iframes[i].src.search(key);
// console.log(match);
// iframes[i].replace(/&amp;/g, '&')
// match_ = match;
if(match != -1) {
source_attr = iframes[i].getAttribute('src')
oac_url = 'oac'
......@@ -276,6 +279,9 @@
}
}
}
setTimeout(function() {for(i=0; i<iframes.length; i++){if(iframes[i].src.search(key)) {iframes[i].src = iframes[i].src} }}, 5000);
})
$(".admin-page").click(function(event){
......
......@@ -21,11 +21,11 @@ from django.contrib.auth.admin import User
from gnowsys_ndf.ndf.models import GSystemType, GSystem , Group #, Node, GSystem #, Triple
from gnowsys_ndf.ndf.models import NodeJSONEncoder
from gnowsys_ndf.ndf.models import node_collection,triple_collection
from gnowsys_ndf.ndf.views.methods import get_group_name_id
from gnowsys_ndf.ndf.views.methods import get_group_name_id, cast_to_data_type
gst_api_fields_dict = { "_id": 1, "name": 1, "altnames": 1, "language": 1, "content": 1, "if_file": 1, "tags": 1, "location": 1, "created_by": 1, "modified_by": 1, "contributors": 1, "legal": 1, "rating": 1, "created_at": 1, "last_update": 1, "collection_set": 1, "post_node": 1, "prior_node": 1, "access_policy": 1, "status": 1, "group_set": 1, "member_of": 1, "type_of": 1,
# "relation_set": 1, "attribute_set": 1,
"relation_set": 1 #,"attribute_set": 1,
}
api_name_model_name_dict = {
......@@ -35,7 +35,6 @@ api_name_model_name_dict = {
def api_get_gs_nodes(request):
get_parameters_dict = request.GET.dict()
if not get_parameters_dict:
aggregated_dict = gst_api_fields_dict.copy()
......@@ -61,6 +60,7 @@ def api_get_gs_nodes(request):
# oid_name_dict[group_id] = group_name
gsystem_structure_dict = GSystem.structure
gsystem_structure_dict.update({ '_id': ObjectId })
gsystem_keys = gsystem_structure_dict.keys()
gst_all_fields_dict = {i: 1 for i in gsystem_keys}
......@@ -97,9 +97,10 @@ def api_get_gs_nodes(request):
get_resource_type = request.GET.get('resource_type', None)
if get_resource_type:
gst_name, gst_id = GSystemType.get_gst_name_id(get_resource_type)
oid_name_dict[gst_id] = gst_name
get_parameters_dict['member_of'] = gst_id
attributes = sample_gs.get_possible_attributes([gst_id])
if gst_id:
oid_name_dict[gst_id] = gst_name
get_parameters_dict['member_of'] = gst_id
attributes = sample_gs.get_possible_attributes([gst_id])
get_workspace = request.GET.get('workspace', None)
if get_workspace:
......@@ -109,14 +110,15 @@ def api_get_gs_nodes(request):
for key, val in get_parameters_dict.iteritems():
stripped_key = key.split('.')[0]
if stripped_key in gsystem_keys:
query_dict.update({ key: ({'$regex': val, '$options': 'i'} if isinstance(gsystem_structure_dict[stripped_key], basestring or unicode) else val) })
if stripped_key in (gsystem_keys):
query_dict.update({ key: ({'$regex': val, '$options': 'i'} if isinstance(gsystem_structure_dict[stripped_key], basestring or unicode) else cast_to_data_type(val, gsystem_structure_dict[stripped_key])) })
elif stripped_key in gst_attributes(gst_id):
query_dict.update({('attribute_set.' + stripped_key): {'$regex': val, '$options': 'i'}})
# print "query_dict: ", query_dict
# making explicit human as decision taken
human = eval(request.GET.get('human', '1'))
gst_fields = gst_api_fields_dict if human else gst_all_fields_dict
......
......@@ -2085,7 +2085,10 @@ def cast_to_data_type(value, data_type):
# print "\n\t\tin method: ", value, " == ", data_type
if data_type != "list":
value = value.strip()
try:
value = value.strip()
except Exception as e:
pass
casted_value = value
if data_type == "unicode":
casted_value = unicode(value)
......@@ -2126,11 +2129,20 @@ def cast_to_data_type(value, data_type):
casted_value = [i.strip() for i in value if i]
# print "casted_value",casted_value
elif data_type == "datetime.datetime":
# "value" should be in following example format
# In [10]: datetime.strptime( "11/12/2014", "%d/%m/%Y")
# Out[10]: datetime(2014, 12, 11, 0, 0)
casted_value = datetime.strptime(value, "%d/%m/%Y")
elif (data_type == "datetime.datetime") or (str(data_type) == "<type 'datetime.datetime'>"):
try:
# "value" should be in following example format
# In [10]: datetime.strptime( "11/12/2014", "%d/%m/%Y")
# Out[10]: datetime(2014, 12, 11, 0, 0)
casted_value = datetime.strptime(value, "%d/%m/%Y")
except Exception as e:
casted_value = datetime.strptime(value, "%d/%m/%Y %H:%M:%S:%f")
elif (str(data_type) == "<class 'bson.objectid.ObjectId'>") or isinstance(data_type, (ObjectId, bson.objectid.ObjectId)):
try:
casted_value = ObjectId(value)
except Exception as e:
pass
return casted_value
......
......@@ -151,24 +151,37 @@ def module_detail(request, group_id, node_id,title=""):
'''
primary_lang_tuple = get_language_tuple(GSTUDIO_PRIMARY_COURSE_LANGUAGE)
if title == "courses":
module_detail_query.update({'$or': [
{'$and': [
{'member_of': {'$in': [gst_announced_unit_id, gst_ce_id]}},
{'$or': [
{'created_by': request.user.id},
{'group_admin': request.user.id},
{'author_set': request.user.id},
{
'$and': [
{'group_type': u'PUBLIC'},
{'language': primary_lang_tuple},
]
},
]}
]},
#{'member_of': gst_announced_unit_id }
]})
# module_detail_query.update({'$or': [
# {'$and': [
# {'member_of': {'$in': [gst_announced_unit_id, gst_ce_id]}},
# {'$or': [
# {'created_by': request.user.id},
# {'group_admin': request.user.id},
# {'author_set': request.user.id},
# {
# '$and': [
# {'group_type': u'PUBLIC'},
# {'language': primary_lang_tuple},
# ]
# },
# ]}
# ]},
# #{'member_of': gst_announced_unit_id }
# ]})
#
# # above can be delete after robust testing of following new query:
module_detail_query.update({
'status': 'PUBLISHED',
'$or': [
{'group_admin': request.user.id},
{'created_by': request.user.id},
{'author_set': request.user.id},
{'member_of': gst_announced_unit_id},
{'language': primary_lang_tuple, 'group_type': u'PUBLIC', 'member_of': gst_ce_id}
]
})
if title == "drafts":
module_detail_query.update({'$or': [
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment