commit ccd4dc6f50
@ -0,0 +1,26 @@
# You can change these variables to use a different base image, but
# you must ensure that your base image inherits from one of ours.
# You can also override these at build time with --build-arg flags
ARG BASE_REPO=gradescope/autograder-base
ARG TAG=latest

FROM ${BASE_REPO}:${TAG}

ADD source /autograder/source

RUN cp /autograder/source/run_autograder /autograder/run_autograder

# Ensure that scripts are Unix-friendly and executable
RUN dos2unix /autograder/run_autograder /autograder/source/setup.sh
RUN chmod +x /autograder/run_autograder

# Do whatever setup was needed in setup.sh, including installing apt packages
# Cleans up the apt cache afterwards in the same step to keep the image small
RUN apt-get update && \
    bash /autograder/source/setup.sh && \
    apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

# You can also use RUN commands in the Dockerfile to install things
# instead of using a bash script
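# For example, a hypothetical alternative (not used here, since this image
# keeps its installs in setup.sh) could look like:
# RUN apt-get update && \
#     apt-get install -y python3 python3-pip && \
#     apt-get clean && rm -rf /var/lib/apt/lists/*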

# The base image defines the CMD and ENTRYPOINT, so don't redefine those
@ -0,0 +1 @@
{"output": "", "score": 3, "max_score": 8}
@ -0,0 +1,59 @@
import pymongo, json

# dbprep
fsroot = '/autograder/source/'
datasets = ['congress', 'bills']
db = pymongo.MongoClient('mongodb://127.0.0.1')['test']

def evaluate(query : str):
    import re
    query = re.sub(r'(\$?[\d\w]+)\s*:', r'"\1" :', query)
    query = re.sub(r'[\r|\n]|.\s*pretty\s*\(\s*\)', '', query).strip()
    return list(eval(query))[0] if query else None
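# Illustration (hypothetical input, not taken from the graded data): the two
# substitutions above quote bare keys and strip newlines and .pretty() calls,
# turning a shell-style query such as
#     db.bills.find({sponsor_name:"Marco Rubio"}).pretty()
# into the eval-able Python expression
#     db.bills.find({"sponsor_name" :"Marco Rubio"})
# which is then run against the local test database.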

for d in datasets:
    with open(fsroot + d + '.json', encoding = 'utf-8') as f:
        db[d].insert_many(json.load(f))

from solution import sols
answers = [evaluate(s) for s in sols]

# grading
from os import listdir
from importlib.util import module_from_spec, spec_from_file_location
subroot = '/autograder/submission/'
feedback = ''
submissions = [subroot + f for f in listdir(subroot) if f.strip().lower().endswith('.py')]

grade = 0
n_queries = len(sols)
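# A submission is assumed to be a single .py file defining one zero-argument
# function per question (query1, query2, ...), each returning a query string,
# for example (hypothetical):
#     def query1():
#         return 'db.bills.find({sponsor_name:"Marco Rubio"}, {_id:0, title:1})'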
if submissions:
    submission = submissions[0]

    for i in range(n_queries):
        feedback += f'Query {i + 1}: '
        try:
            spec = spec_from_file_location('curr', submission)
            module = module_from_spec(spec)
            spec.loader.exec_module(module)
            q = getattr(module, f'query{i + 1}')()
            if evaluate(q) == answers[i]:
                grade += 1
                feedback += 'Correct.\n'
            else:
                feedback += 'Wrong Answer.\n'
        except Exception:
            feedback += 'Runtime Error.\n'
else:
    feedback += 'No python file in submission.\n'

# output
results = {
    'output': feedback,
    'score': grade * 100 / n_queries,
    'max_score': 100,
}

with open('/autograder/results/results.json', 'w') as res:
    json.dump(results, res)
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
@ -0,0 +1,20 @@
#!/usr/bin/env bash

mkdir -p /autograder/results/

cd /data
mongod > /dev/null 2>&1 &
cd -
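# mongod comes up in the background; the grader's first database call blocks
# until the server is reachable (pymongo's default server-selection timeout is
# 30 seconds), so no explicit wait is added here.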

python3 /autograder/source/autograder.py

# files_submitted=$(ls /autograder/submission)
# num_files_submitted=$(ls /autograder/submission/ | wc -l)

# cat > /autograder/results/results.json <<EOF
# {
# "output": "Good job, you passed! Submitted ${num_files_submitted} files: ${files_submitted}",
# "score": 10,
# "max_score": 10
# }
# EOF
@ -0,0 +1,10 @@
apt install -y curl python3 python3-pip
curl -fsSL https://pgp.mongodb.com/server-7.0.asc | gpg -o /usr/share/keyrings/mongodb-server-7.0.gpg --dearmor
export OS_VER=`cat /etc/os-release | grep VERSION_CODENAME` &&\
export OS_VER=${OS_VER#*=} &&\
printf "deb [ arch=amd64,arm64 signed-by=/usr/share/keyrings/mongodb-server-7.0.gpg ] https://repo.mongodb.org/apt/ubuntu "${OS_VER}"/mongodb-org/7.0 multiverse\n">/etc/apt/sources.list.d/mongodb.list
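# The chained commands above read VERSION_CODENAME from /etc/os-release
# (e.g. "VERSION_CODENAME=jammy"; the codename is just an assumed example),
# strip everything up to the '=', and write the matching MongoDB 7.0 apt
# source entry, e.g.:
#   deb [ arch=amd64,arm64 signed-by=/usr/share/keyrings/mongodb-server-7.0.gpg ] https://repo.mongodb.org/apt/ubuntu jammy/mongodb-org/7.0 multiverse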
apt update
apt install -y mongodb-org
mkdir -p /data/db
cd /data
python3 -m pip install pymongo
@ -0,0 +1,47 @@
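# Each entry in sols below is a Mongo-shell-style query string; autograder.py
# runs both these reference queries and the submitted ones through the same
# evaluate() helper and compares the first returned document.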
sols = ['db.bills.find({sponsor_name:"Marco Rubio"}, {_id:0, title: 1, sponsor_name:1, sponsor_state:1})',
'db.bills.find({cosponsors:{$gte:3, $lte:5}}, {_id:0, title:1, sponsor_name:1, cosponsors:1})',
'db.bills.find({$or:[{cosponsors:{$gte:3, $lte:5}}, {sponsor_name:"Marco Rubio"}]}, {_id:0, title:1, sponsor_name:1, cosponsors:1})',
'''db.congress.aggregate([
{ $match: {"state": "IN"} },
{ $group: {_id: "$role_type", count_of_this_role: {$sum:1} } },
{ $sort: {count_of_this_role: -1} },
{ $project: {_id: 1, count_of_this_role: 1} }
])''',
'''db.bills.aggregate([
{ $lookup: {
from: "congress",
localField: "sponsor_id",
foreignField: "person.bioguideid",
as:"congressMember"} },
{ $unwind: "$congressMember" },
{ $project: {_id: 0, title:1, sponsor_name: 1, "description": "$congressMember.description", "DOB": "$congressMember.person.birthday"} }
])''',
'''db.bills.aggregate([
{ $unwind: "$committee_codes" },
{ $project: {committee_codes: 1} },
{ $group: {_id: "$committee_codes", countOfCommittee: {$sum:1} } },
{ $sort: {countOfCommittee: -1} },
])''',
'''db.bills.aggregate([
{ $project: {_id: 1, title:1, sponsor_name: 1, sponsor_state:1}},
{ $lookup: {
from: "bills",
localField: "sponsor_state",
foreignField: "sponsor_state",
as:"otherBills"} },
{ $unwind: "$otherBills" },
{ $project: {title: 1, sponsor_name: 1, sponsor_state: 1, otherbill_id: "$otherBills._id", otherbill_title: "$otherBills.title", otherbill_sponser_name: "$otherBills.sponsor_name", otherbill_sponsor_state: "$otherBills.sponsor_state"}},
{ $match: {$expr: {$lt: ["$_id", "$otherbill_id"]}}}
])''',
'''db.congress.aggregate([
{ $project: {_id: 1, firstname: "$person.firstname", lastname: "$person.lastname", state: 1}},
{ $lookup: {
from: "congress",
localField: "lastname",
foreignField: "person.lastname",
as:"otherPersons"} },
{ $unwind: "$otherPersons" },
{ $match: {$expr: {$lt: ["$_id", "$otherPersons._id"]}}},
{ $project: {_id:1, firstname: 1, lastname: 1, state:1, otherPerson_id: "$otherPersons._id", otherPerson_firstname: "$otherPersons.person.firstname", otherPerson_lastname: "$otherPersons.person.lastname", otherPerson_state: "$otherPersons.state"}},
{ $match: {$expr: {$eq: ["$state", "$otherPerson_state"]}}},
])''']
@ -0,0 +1,26 @@
# You can change these variables to use a different base image, but
# you must ensure that your base image inherits from one of ours.
# You can also override these at build time with --build-arg flags
ARG BASE_REPO=gradescope/autograder-base
ARG TAG=latest

FROM ${BASE_REPO}:${TAG}

ADD source /autograder/source

RUN cp /autograder/source/run_autograder /autograder/run_autograder

# Ensure that scripts are Unix-friendly and executable
RUN dos2unix /autograder/run_autograder /autograder/source/setup.sh
RUN chmod +x /autograder/run_autograder

# Do whatever setup was needed in setup.sh, including installing apt packages
# Cleans up the apt cache afterwards in the same step to keep the image small
RUN apt-get update && \
    bash /autograder/source/setup.sh && \
    apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

# You can also use RUN commands in the Dockerfile to install things
# instead of using a bash script

# The base image defines the CMD and ENTRYPOINT, so don't redefine those
@ -0,0 +1 @@
docker build -t sunyinqi0508/neo4j_autograder . && docker push sunyinqi0508/neo4j_autograder
@ -0,0 +1,47 @@
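// Rough summary of the graph this statement builds (for orientation only):
// (:Author)-[:WRITES]->(:Article|:Chapter), (:Author)-[:EDITS]->(:Book),
// (:Article)-[:IN {pp}]->(:Issue)-[:OF]->(:Journal),
// (:Chapter)-[:IN {pp}]->(:Book)<-[:PUBLISHED_BY]-(:Publisher),
// plus (:Article|:Chapter)-[:CITES]->(:Article) citation edges.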
CREATE
(gorman:Author {name:"Gorman, Michael"}),
(toner:Author {name:"Toner, Patrick"}),
(koslicki:Author {name:"Koslicki, Kathrin"}),
(fine:Author {name:"Fine, Kit"}),
(roger:Author {name:"Roger, Cat"}),
(beth:Author {name:"Beth, Dog"}),
(art1:Article {doi:"10.5840/ipq20064626", title:"Independence and Substance"}),
(art2:Article {doi:"10.1007/s11098-010-9521-4", title:"Independence Accounts of Substance and Substantial Parts"}),
(art3:Article {doi:"10.1007/s11098-011-9708-3", title:"On Substantial Independence: a Reply to Patrick Toner"}),
(art4:Article {doi:"10.1080/05568640609485174", title:"Substance and Identity-Dependence"}),
(art5:Article {doi:"10.2307/4545221", title:"Ontological Dependence"}),
(art6:Article {doi:"10.2307/4519752", title:"Yet Another Title"}),
(art7:Article {doi:"10.2308/4547590", title:"Seventh Title"}),
(chp1:Chapter {no:9, title:"Substance, Independence, and Unity"}),
(chp2:Chapter {no:10, title:"Chapter on Making Chapters"}),
(ipq:Journal {title:"International Philosophical Quarterly", ISSN:"0019-0365", onlineISSN:"2153-8077"}),
(ps:Journal {title:"Philosophical Studies", ISSN:"0031-8116", onlineISSN:"1573-0883"}),
(pp:Journal {title:"Philosophical Papers", ISSN:"0556-8641", onlineISSN:"1996-8523"}),
(pas:Journal {title:"Proceedings of the Aristotelian Society", ISSN:"0066-7374", onlineISSN:"1467-9264"}),
(hitm:Journal {title:"History in the making", ISSN:"0084-7649", onlineISSN:"1235-7549"}),
(ssj:Journal {title:"Something Something Journal", ISSN:"0420-6729", onlineISSN:"5964-3248"}),
(gorman)-[:WRITES]->(art1)-[:IN {pp:[147,159]} ]->(:Issue {volume:46, issue:2, year:2006, month:6})-[:OF]->(ipq),
(toner)-[:WRITES]->(art2)-[:IN {pp:[37,43]} ]->(:Issue {volume:155, issue:1, year:2011, month:8})-[:OF]->(ps),
(gorman)-[:WRITES]->(art3)-[:IN {pp:[239,297]} ]->(:Issue {volume:159, issue:2, year:2012, month:6})-[:OF]->(ps),
(gorman)-[:WRITES]->(art4)-[:IN {pp:[103,118]} ]->(:Issue {volume:35, issue:1, year:2006, month:3})-[:OF]->(pp),
(fine)-[:WRITES]->(art5)-[:IN {pp:[269,290]} ]->(:Issue {volume:95, year:1995})-[:OF]->(pas),
(roger)-[:WRITES]->(art6)-[:IN {pp:[206,300]} ]->(:Issue {volume:24, year:1996})-[:OF]->(hitm),
(beth)-[:WRITES]->(art7)-[:IN {pp:[0,5]} ]->(:Issue {volume:32, year:1903})-[:OF]->(ssj),
(koslicki)-[:WRITES]->(chp1)-[:IN {pp:[169,195]} ]->(book:Book {title:"Aristotle on Method and Metaphysics", `ISBN-10`:"0230360912", `ISBN-13`:"978-0230360914", year:2013, month:7})<-[:EDITS]-(feser:Author {name:"Feser, Edward"}),
(beth)-[:WRITES]->(chp2)-[:IN {pp:[104,109]} ]->(book2:Book {title:"Book Name is Two", `ISBN-10`:"023546382", `ISBN-13`:"978-0230346584", year:2003, month:9})<-[:EDITS]-(tim:Author {name:"Tim, Bob"}),
(book)<-[:PUBLISHED_BY]-(pub:Publisher {location:"London", name:"Palgrave Macmillan"}),
(book2)<-[:PUBLISHED_BY]-(pub2:Publisher {location:"Madagascar", name:"Alex Lion"}),
(art1)-[:CITES]->(art5),
(art2)-[:CITES]->(art1),
(art2)-[:CITES]->(art4),
(art3)-[:CITES]->(art2),
(art3)-[:CITES]->(art4),
(art5)-[:CITES]->(art6),
(art7)-[:CITES]->(art3),
(art6)-[:CITES]->(art1),
(chp1)-[:CITES]->(art1),
(chp1)-[:CITES]->(art2),
(chp2)-[:CITES]->(art3),
(chp1)-[:CITES]->(art6),
(chp2)-[:CITES]->(art7),
(chp1)-[:CITES]->(art3)
@ -0,0 +1,70 @@
import neo4j, json

# dbprep
fsroot = '/autograder/source/'
datasets = ['Neo4J_dataset']
db = neo4j.GraphDatabase.driver('bolt://localhost:7687', auth = ('neo4j', '4Sfz541Lm')).session()
def evaluate(query : str):
    query = query.strip()
    return db.run(query).data() if query else None

while True:
    try:
        db.run('RETURN 0')
        break
    except:
        continue

for d in datasets:
    with open(fsroot + d + '.txt', encoding = 'utf-8') as f:
        db.run(f.read())

from solution import sols
answers = [evaluate(s) if type(s) is str else tuple(evaluate(k) for k in s) for s in sols ]
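# Note: an entry in sols may be either a single Cypher string or a tuple of
# equally acceptable alternative queries; in the tuple case the corresponding
# answer is a tuple of result lists, and eq() below counts a submission as
# correct if it matches any one of them.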

# grading
from os import listdir
from importlib.util import module_from_spec, spec_from_file_location
subroot = '/autograder/submission/'
feedback = ''
submissions = [subroot + f for f in listdir(subroot) if f.strip().lower().endswith('.py')]

grade = 0
n_queries = len(sols)

if submissions:
    submission = submissions[0]

    for i in range(n_queries):
        feedback += f'Query {i + 1}: '
        try:
            spec = spec_from_file_location('curr', submission)
            module = module_from_spec(spec)
            spec.loader.exec_module(module)
            q = getattr(module, f'query{i + 1}')()
            def eq(a: list, b):
                if a is None: return False
                if type(b) is tuple:
                    return any(eq(a, bb) for bb in b)
                if len(a) != len(b): return False
                return all(aa == bb for aa, bb in zip(a, b))

            if eq(evaluate(q), answers[i]):
                grade += 1
                feedback += 'Correct.\n'
            else:
                feedback += 'Wrong Answer.\n'
        except Exception:
            feedback += 'Runtime Error.\n'
else:
    feedback += 'No python file in submission.\n'

# output
results = {
    'output': feedback,
    'score': round(grade * 100 / n_queries, 1),
    'max_score': 100,
}

with open('/autograder/results/results.json', 'w') as res:
    json.dump(results, res)
@ -0,0 +1,18 @@
#!/usr/bin/env bash

mkdir -p /autograder/results/

service neo4j start
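# No sleep is needed here: autograder.py repeatedly retries a trivial
# "RETURN 0" query until the bolt port accepts connections.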

python3 /autograder/source/autograder.py

# files_submitted=$(ls /autograder/submission)
# num_files_submitted=$(ls /autograder/submission/ | wc -l)

# cat > /autograder/results/results.json <<EOF
# {
# "output": "Good job, you passed! Submitted ${num_files_submitted} files: ${files_submitted}",
# "score": 10,
# "max_score": 10
# }
# EOF
@ -0,0 +1,10 @@
apt install -y curl python3 python3-pip
curl -fsSL https://debian.neo4j.com/neotechnology.gpg.key | gpg --dearmor -o /usr/share/keyrings/neo4j.gpg
echo "deb [signed-by=/usr/share/keyrings/neo4j.gpg] https://debian.neo4j.com stable latest" > /etc/apt/sources.list.d/neo4j.list
apt update
apt install -y neo4j
sed -i '1s/^/server.default_listen_address=0.0.0.0\n/' /etc/neo4j/neo4j.conf
systemctl enable --now neo4j
neo4j-admin dbms set-initial-password 4Sfz541Lm --require-password-change=false

python3 -m pip install neo4j
@ -0,0 +1,73 @@
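# Reference Cypher queries for the Neo4j assignment. Each element of sols is
# either a single query string or a tuple of equally acceptable alternatives
# (see the eq() comparison in autograder.py).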
sols = [
'''
MATCH (author:Author)-[:WRITES]->(article:Article)
RETURN author.name, article.title
''',
('''
match (author:Author)-[:WRITES]->(paper_or_chapter)
return author.name, paper_or_chapter.title, labels(paper_or_chapter)
''',
'''
MATCH (author:Author)-[:WRITES]->(paper_or_chapter)
WITH author, paper_or_chapter, labels(paper_or_chapter) as publicationType
WHERE publicationType=["Article"] or publicationType=["Chapter"]
RETURN author.name, paper_or_chapter.title, publicationType
'''),
('''
match (author:Author)-[:WRITES|EDITS]->(publication)
return author.name, publication.title, labels(publication)
''',
'''
MATCH (author:Author)-[]->(publication)
WHERE publication:Article or publication:Chapter or publication:Book
RETURN author.name, publication.title, labels(publication)
'''),
('''
match (author:Author)-[:WRITES|EDITS]->(publication)
return author.name, count(publication) as publication_count
''',
'''
MATCH (author:Author)-[]->(publication)
WHERE publication:Article OR publication:Chapter or publication:Book
RETURN author.name, count(publication) as publication_count
'''),
('''
match(a:Article)-[in:IN]->(:Issue)
where (in.pp[1] - in.pp[0]) <= 10
return a.title, (in.pp[1] - in.pp[0] + 1) as NumberOfPages
''',
'''
MATCH (a:Article)-[e:IN]->(:Issue)
WITH a, e.pp[1]-e.pp[0]+1 as NumberOfPages
WHERE NumberOfPages <=10
RETURN a.title, NumberOfPages
'''),
'''
MATCH (p2:Article)<-[]-(a1:Author)-[]->(p1:Article),(p1)-[:CITES]->(p2)
RETURN a1.name, p1.title, p2.title
''',
'''
MATCH (publication)<-[:CITES]-()
WITH publication, count(*) as publication_count
WHERE (publication:Article or publication:Chapter) and publication_count >= 2
RETURN publication.title, publication_count
''',
('''
match (journal:Journal)<-[:OF]-(issue:Issue)<-[:IN]-(article:Article)<-[:CITES]-(someOtherPublication)
with journal, article, count(someOtherPublication) as citations_count
where citations_count > 1
match (article)<-[:WRITES]-(author:Author)
return journal.title, article.title, citations_count, author.name
''',
'''
MATCH (author:Author)-[:WRITES]->(article:Article)-[:IN]->()-[]->(journal:Journal), ()-[e:CITES]->(article:Article)
WITH journal, article, author, count(e) as citation_count
WHERE citation_count >= 2
RETURN journal.title, article.title, citation_count, author.name
'''),
'''
MATCH (a1:Article)-[:IN]->(i1:Issue)-[]->(:Journal)<-[]-(i2:Issue)<-[]-(a2:Article)
WHERE (a1)-[:CITES]->(a2)
RETURN a1.title, i1.issue, a2.title, i2.issue
'''
]