diff --git a/.rubocop.yml b/.rubocop.yml new file mode 100644 index 00000000..73161097 --- /dev/null +++ b/.rubocop.yml @@ -0,0 +1,50 @@ +--- + require: + - rubocop-performance + - rubocop-rspec + + AllCops: + DisplayCopNames: true + NewCops: enable + TargetRubyVersion: 2.6 + + Layout/ArgumentAlignment: + # https://www.rubydoc.info/github/bbatsov/RuboCop/RuboCop/Cop/Layout/ArgumentAlignment + EnforcedStyle: with_fixed_indentation + Layout/CaseIndentation: + EnforcedStyle: end + Layout/EndAlignment: + # https://www.rubydoc.info/gems/rubocop/RuboCop/Cop/Layout/EndAlignment + EnforcedStyleAlignWith: variable + Layout/FirstArgumentIndentation: + # https://www.rubydoc.info/github/bbatsov/RuboCop/RuboCop/Cop/Layout/FirstArgumentIndentation + Enabled: false + Layout/HashAlignment: + EnforcedLastArgumentHashStyle: ignore_implicit + Layout/LineLength: + Max: 120 + Layout/MultilineMethodCallIndentation: + # https://www.rubydoc.info/gems/rubocop/RuboCop/Cop/Layout/MultilineMethodCallIndentation + EnforcedStyle: indented + Lint/AmbiguousBlockAssociation: + AllowedMethods: ['change'] + + Metrics/MethodLength: + CountComments: false # count full line comments? 
+ Max: 20 + + Style/Documentation: + Enabled: false + Style/DoubleNegation: + Enabled: false + Style/FormatStringToken: + Enabled: false + Style/HashSyntax: + Enabled: false + Style/MutableConstant: + Enabled: false + Style/StringLiterals: + Enabled: false + Style/StringConcatenation: + Enabled: false + diff --git a/Gemfile b/Gemfile index 9017d256..6ff46eff 100644 --- a/Gemfile +++ b/Gemfile @@ -1,48 +1,53 @@ +# frozen_string_literal: true + source "https://rubygems.org" -gem "nokogiri", '~> 1.13', '>= 1.13.4' +gem "nokogiri", '~> 1.13', '>= 1.13.4' gemspec -gem 'rdf', git: "https://github.com/ruby-rdf/rdf", branch: "develop" -gem 'json-canonicalization',git: "https://github.com/dryruby/json-canonicalization",branch: "develop" +gem 'json-canonicalization', git: "https://github.com/dryruby/json-canonicalization", branch: "develop" +gem 'rdf', git: "https://github.com/ruby-rdf/rdf", branch: "develop" group :development do - gem 'ebnf', git: "https://github.com/dryruby/ebnf", branch: "develop" - gem 'json-ld-preloaded', github: "ruby-rdf/json-ld-preloaded", branch: "develop" - gem 'ld-patch', github: "ruby-rdf/ld-patch", branch: "develop" - gem 'linkeddata', git: "https://github.com/ruby-rdf/linkeddata", branch: "develop" - gem 'rack-linkeddata', git: "https://github.com/ruby-rdf/rack-linkeddata", branch: "develop" - gem 'rdf-aggregate-repo', git: "https://github.com/ruby-rdf/rdf-aggregate-repo", branch: "develop" - gem 'rdf-isomorphic', git: "https://github.com/ruby-rdf/rdf-isomorphic", branch: "develop" - gem 'rdf-json', github: "ruby-rdf/rdf-json", branch: "develop" - gem 'rdf-microdata', git: "https://github.com/ruby-rdf/rdf-microdata", branch: "develop" - gem 'rdf-n3', github: "ruby-rdf/rdf-n3", branch: "develop" - gem 'rdf-normalize', github: "ruby-rdf/rdf-normalize", branch: "develop" - gem 'rdf-rdfa', git: "https://github.com/ruby-rdf/rdf-rdfa", branch: "develop" - gem 'rdf-rdfxml', git: "https://github.com/ruby-rdf/rdf-rdfxml", branch: "develop" - gem 
'rdf-reasoner', github: "ruby-rdf/rdf-reasoner", branch: "develop" - gem 'rdf-spec', git: "https://github.com/ruby-rdf/rdf-spec", branch: "develop" - gem 'rdf-tabular', github: "ruby-rdf/rdf-tabular", branch: "develop" - gem 'rdf-trig', git: "https://github.com/ruby-rdf/rdf-trig", branch: "develop" - gem 'rdf-trix', github: "ruby-rdf/rdf-trix", branch: "develop" - gem 'rdf-turtle', git: "https://github.com/ruby-rdf/rdf-turtle", branch: "develop" - gem 'rdf-vocab', git: "https://github.com/ruby-rdf/rdf-vocab", branch: "develop" - gem 'rdf-xsd', git: "https://github.com/ruby-rdf/rdf-xsd", branch: "develop" - gem 'sinatra-linkeddata', git: "https://github.com/ruby-rdf/sinatra-linkeddata", branch: "develop" - gem 'shex', github: "ruby-rdf/shex", branch: "develop" - gem 'sparql', git: "https://github.com/ruby-rdf/sparql", branch: "develop" - gem 'sparql-client', git: "https://github.com/ruby-rdf/sparql-client", branch: "develop" - gem 'sxp', git: "https://github.com/dryruby/sxp.rb", branch: "develop" - gem 'fasterer' gem 'earl-report' - gem 'ruby-prof', platforms: :mri + gem 'ebnf', git: "https://github.com/dryruby/ebnf", branch: "develop" + gem 'fasterer' + gem 'json-ld-preloaded', github: "ruby-rdf/json-ld-preloaded", branch: "develop" + gem 'ld-patch', github: "ruby-rdf/ld-patch", branch: "develop" + gem 'linkeddata', git: "https://github.com/ruby-rdf/linkeddata", branch: "develop" + gem 'rack-linkeddata', git: "https://github.com/ruby-rdf/rack-linkeddata", branch: "develop" + gem 'rdf-aggregate-repo', git: "https://github.com/ruby-rdf/rdf-aggregate-repo", branch: "develop" + gem 'rdf-isomorphic', git: "https://github.com/ruby-rdf/rdf-isomorphic", branch: "develop" + gem 'rdf-json', github: "ruby-rdf/rdf-json", branch: "develop" + gem 'rdf-microdata', git: "https://github.com/ruby-rdf/rdf-microdata", branch: "develop" + gem 'rdf-n3', github: "ruby-rdf/rdf-n3", branch: "develop" + gem 'rdf-normalize', github: "ruby-rdf/rdf-normalize", branch: "develop" + gem 
'rdf-rdfa', git: "https://github.com/ruby-rdf/rdf-rdfa", branch: "develop" + gem 'rdf-rdfxml', git: "https://github.com/ruby-rdf/rdf-rdfxml", branch: "develop" + gem 'rdf-reasoner', github: "ruby-rdf/rdf-reasoner", branch: "develop" + gem 'rdf-spec', git: "https://github.com/ruby-rdf/rdf-spec", branch: "develop" + gem 'rdf-tabular', github: "ruby-rdf/rdf-tabular", branch: "develop" + gem 'rdf-trig', git: "https://github.com/ruby-rdf/rdf-trig", branch: "develop" + gem 'rdf-trix', github: "ruby-rdf/rdf-trix", branch: "develop" + gem 'rdf-turtle', git: "https://github.com/ruby-rdf/rdf-turtle", branch: "develop" + gem 'rdf-vocab', git: "https://github.com/ruby-rdf/rdf-vocab", branch: "develop" + gem 'rdf-xsd', git: "https://github.com/ruby-rdf/rdf-xsd", branch: "develop" + gem 'ruby-prof', platforms: :mri + gem 'shex', github: "ruby-rdf/shex", branch: "develop" + gem 'sinatra-linkeddata', git: "https://github.com/ruby-rdf/sinatra-linkeddata", branch: "develop" + gem 'sparql', git: "https://github.com/ruby-rdf/sparql", branch: "develop" + gem 'sparql-client', git: "https://github.com/ruby-rdf/sparql-client", branch: "develop" + gem 'sxp', git: "https://github.com/dryruby/sxp.rb", branch: "develop" end group :development, :test do - gem 'simplecov', '~> 0.21', platforms: :mri - gem 'simplecov-lcov', '~> 0.8', platforms: :mri - gem 'psych', platforms: [:mri, :rbx] gem 'benchmark-ips' + gem 'psych', platforms: %i[mri rbx] gem 'rake' + gem 'rubocop' + gem 'rubocop-performance' + gem 'rubocop-rspec' + gem 'simplecov', '~> 0.21', platforms: :mri + gem 'simplecov-lcov', '~> 0.8', platforms: :mri end group :debug do diff --git a/Rakefile b/Rakefile index 14cc84ab..e39ec7da 100644 --- a/Rakefile +++ b/Rakefile @@ -1,6 +1,8 @@ +# frozen_string_literal: true + require 'rubygems' -task default: [ :spec ] +task default: [:spec] namespace :gem do desc "Build the json-ld-#{File.read('VERSION').chomp}.gem file" @@ -17,18 +19,18 @@ end require 'rspec/core/rake_task' desc 'Run 
specifications' RSpec::Core::RakeTask.new(:spec) do |spec| - spec.rspec_opts = %w(--options spec/spec.opts) if File.exists?('spec/spec.opts') + spec.rspec_opts = %w[--options spec/spec.opts] if File.exist?('spec/spec.opts') end desc "Generate schema.org context" task :schema_context do - %x( + ` script/gen_context https://schema.org/docs/schema_org_rdfa.html \ --vocab http://schema.org/ \ --prefix 'schema http://schema.org/' \ --body --hier \ --o etc/schema.org.jsonld - ) + ` end desc "Create concatenated test manifests" @@ -37,68 +39,73 @@ file "etc/manifests.nt" do require 'json/ld' require 'rdf/ntriples' graph = RDF::Graph.new do |g| - %w( https://w3c.github.io/json-ld-api/tests/compact-manifest.jsonld + %w[ https://w3c.github.io/json-ld-api/tests/compact-manifest.jsonld https://w3c.github.io/json-ld-api/tests/expand-manifest.jsonld https://w3c.github.io/json-ld-api/tests/flatten-manifest.jsonld https://w3c.github.io/json-ld-api/tests/fromRdf-manifest.jsonld https://w3c.github.io/json-ld-api/tests/html-manifest.jsonld https://w3c.github.io/json-ld-api/tests/remote-doc-manifest.jsonld https://w3c.github.io/json-ld-api/tests/toRdf-manifest.jsonld - https://w3c.github.io/json-ld-framing/tests/frame-manifest.jsonld - ).each do |man| + https://w3c.github.io/json-ld-framing/tests/frame-manifest.jsonld].each do |man| puts "load #{man}" g.load(man, unique_bnodes: true) end end puts "write" - RDF::NTriples::Writer.open("etc/manifests.nt", unique_bnodes: true, validate: false) {|w| w << graph} + RDF::NTriples::Writer.open("etc/manifests.nt", unique_bnodes: true, validate: false) { |w| w << graph } end # Presentation building namespace :presentation do desc "Clean presentation files" task :clean do - FileUtils.rm %w(compacted expanded framed).map {|f| "presentation/dbpedia/#{f}.jsonld"} + FileUtils.rm %w[compacted expanded framed].map { |f| "presentation/dbpedia/#{f}.jsonld" } end desc "Build presentation files" - task build: %w( + task build: %w[ 
presentation/dbpedia/expanded.jsonld presentation/dbpedia/compacted.jsonld presentation/dbpedia/framed.jsonld - ) + ] desc "Build expanded example" - file "presentation/dbpedia/expanded.jsonld" => %w( + file "presentation/dbpedia/expanded.jsonld" => %w[ presentation/dbpedia/orig.jsonld - presentation/dbpedia/expanded-context.jsonld) do - system(%w( - script/parse - --expand presentation/dbpedia/orig.jsonld - --context presentation/dbpedia/expanded-context.jsonld - -o presentation/dbpedia/expanded.jsonld).join(" ")) + presentation/dbpedia/expanded-context.jsonld + ] do + system(%w[ + script/parse + --expand presentation/dbpedia/orig.jsonld + --context presentation/dbpedia/expanded-context.jsonld + -o presentation/dbpedia/expanded.jsonld + ].join(" ")) end desc "Build compacted example" - file "presentation/dbpedia/compacted.jsonld" => %w( + file "presentation/dbpedia/compacted.jsonld" => %w[ presentation/dbpedia/expanded.jsonld - presentation/dbpedia/compact-context.jsonld) do - system(%w( - script/parse - --compact presentation/dbpedia/expanded.jsonld - --context presentation/dbpedia/compact-context.jsonld - -o presentation/dbpedia/compacted.jsonld).join(" ")) + presentation/dbpedia/compact-context.jsonld + ] do + system(%w[ + script/parse + --compact presentation/dbpedia/expanded.jsonld + --context presentation/dbpedia/compact-context.jsonld + -o presentation/dbpedia/compacted.jsonld + ].join(" ")) end desc "Build framed example" - file "presentation/dbpedia/framed.jsonld" => %w( + file "presentation/dbpedia/framed.jsonld" => %w[ presentation/dbpedia/expanded.jsonld - presentation/dbpedia/frame.jsonld) do - system(%w( - script/parse - --frame presentation/dbpedia/frame.jsonld - presentation/dbpedia/expanded.jsonld - -o presentation/dbpedia/framed.jsonld).join(" ")) + presentation/dbpedia/frame.jsonld + ] do + system(%w[ + script/parse + --frame presentation/dbpedia/frame.jsonld + presentation/dbpedia/expanded.jsonld + -o presentation/dbpedia/framed.jsonld + 
].join(" ")) end end diff --git a/VERSION b/VERSION index 351227fc..5ae69bd5 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -3.2.4 +3.2.5 diff --git a/etc/schema.org.jsonld b/etc/schema.org.jsonld index 32816628..e69de29b 100644 --- a/etc/schema.org.jsonld +++ b/etc/schema.org.jsonld @@ -1,21398 +0,0 @@ -{ - "@context": [ - { - "@vocab": "http://schema.org/", - "about": { - "@type": "@id" - }, - "acceptedAnswer": { - "@type": "@id" - }, - "acceptedOffer": { - "@type": "@id" - }, - "acceptedPaymentMethod": { - "@type": "@id" - }, - "accountablePerson": { - "@type": "@id" - }, - "acquiredFrom": { - "@type": "@id" - }, - "actionStatus": { - "@type": "@id" - }, - "activityDuration": { - "@type": "Duration" - }, - "actor": { - "@type": "@id" - }, - "actors": { - "@type": "@id" - }, - "addOn": { - "@type": "@id" - }, - "additionalType": { - "@type": "@id" - }, - "address": { - "@type": "@id" - }, - "addressCountry": { - "@type": "@id" - }, - "advanceBookingRequirement": { - "@type": "@id" - }, - "adverseOutcome": { - "@type": "@id" - }, - "affectedBy": { - "@type": "@id" - }, - "affiliation": { - "@type": "@id" - }, - "agent": { - "@type": "@id" - }, - "aggregateRating": { - "@type": "@id" - }, - "album": { - "@type": "@id" - }, - "albums": { - "@type": "@id" - }, - "alumni": { - "@type": "@id" - }, - "alumniOf": { - "@type": "@id" - }, - "amountOfThisGood": { - "@type": "Number" - }, - "answerCount": { - "@type": "Integer" - }, - "antagonist": { - "@type": "@id" - }, - "applicableLocation": { - "@type": "@id" - }, - "application": { - "@type": "@id" - }, - "appliesToDeliveryMethod": { - "@type": "@id" - }, - "appliesToPaymentMethod": { - "@type": "@id" - }, - "area": { - "@type": "@id" - }, - "areaServed": { - "@type": "@id" - }, - "arrivalAirport": { - "@type": "@id" - }, - "arrivalBusStop": { - "@type": "@id" - }, - "arrivalStation": { - "@type": "@id" - }, - "arrivalTime": { - "@type": "DateTime" - }, - "arterialBranch": { - "@type": "@id" - }, - "associatedAnatomy": 
{ - "@type": "@id" - }, - "associatedArticle": { - "@type": "@id" - }, - "associatedMedia": { - "@type": "@id" - }, - "attendee": { - "@type": "@id" - }, - "attendees": { - "@type": "@id" - }, - "audience": { - "@type": "@id" - }, - "audio": { - "@type": "@id" - }, - "author": { - "@type": "@id" - }, - "availability": { - "@type": "@id" - }, - "availabilityEnds": { - "@type": "DateTime" - }, - "availabilityStarts": { - "@type": "DateTime" - }, - "availableAtOrFrom": { - "@type": "@id" - }, - "availableChannel": { - "@type": "@id" - }, - "availableDeliveryMethod": { - "@type": "@id" - }, - "availableFrom": { - "@type": "DateTime" - }, - "availableIn": { - "@type": "@id" - }, - "availableLanguage": { - "@type": "@id" - }, - "availableService": { - "@type": "@id" - }, - "availableStrength": { - "@type": "@id" - }, - "availableTest": { - "@type": "@id" - }, - "availableThrough": { - "@type": "DateTime" - }, - "baseSalary": { - "@type": "Number" - }, - "billingAddress": { - "@type": "@id" - }, - "billingIncrement": { - "@type": "Number" - }, - "birthDate": { - "@type": "Date" - }, - "blogPost": { - "@type": "@id" - }, - "blogPosts": { - "@type": "@id" - }, - "bloodSupply": { - "@type": "@id" - }, - "bookFormat": { - "@type": "@id" - }, - "bookingAgent": { - "@type": "@id" - }, - "bookingTime": { - "@type": "DateTime" - }, - "borrower": { - "@type": "@id" - }, - "branch": { - "@type": "@id" - }, - "branchOf": { - "@type": "@id" - }, - "brand": { - "@type": "@id" - }, - "broadcaster": { - "@type": "@id" - }, - "businessFunction": { - "@type": "@id" - }, - "buyer": { - "@type": "@id" - }, - "byArtist": { - "@type": "@id" - }, - "calories": { - "@type": "@id" - }, - "candidate": { - "@type": "@id" - }, - "carbohydrateContent": { - "@type": "@id" - }, - "carrier": { - "@type": "@id" - }, - "catalog": { - "@type": "@id" - }, - "cause": { - "@type": "@id" - }, - "causeOf": { - "@type": "@id" - }, - "checkinTime": { - "@type": "DateTime" - }, - "checkoutTime": { - "@type": 
"DateTime" - }, - "childMaxAge": { - "@type": "Number" - }, - "childMinAge": { - "@type": "Number" - }, - "children": { - "@type": "@id" - }, - "cholesterolContent": { - "@type": "@id" - }, - "clipNumber": { - "@type": "Integer" - }, - "closes": { - "@type": "Time" - }, - "code": { - "@type": "@id" - }, - "codeRepository": { - "@type": "@id" - }, - "colleague": { - "@type": "@id" - }, - "colleagues": { - "@type": "@id" - }, - "collection": { - "@type": "@id" - }, - "comment": { - "@type": "@id" - }, - "commentCount": { - "@type": "Integer" - }, - "commentTime": { - "@type": "Date" - }, - "comprisedOf": { - "@type": "@id" - }, - "connectedTo": { - "@type": "@id" - }, - "contactOption": { - "@type": "@id" - }, - "contactPoint": { - "@type": "@id" - }, - "contactPoints": { - "@type": "@id" - }, - "containedIn": { - "@type": "@id" - }, - "contentLocation": { - "@type": "@id" - }, - "contentUrl": { - "@type": "@id" - }, - "contraindication": { - "@type": "@id" - }, - "contributor": { - "@type": "@id" - }, - "cookTime": { - "@type": "Duration" - }, - "copyrightHolder": { - "@type": "@id" - }, - "copyrightYear": { - "@type": "Number" - }, - "cost": { - "@type": "@id" - }, - "costCategory": { - "@type": "@id" - }, - "course": { - "@type": "@id" - }, - "creator": { - "@type": "@id" - }, - "customer": { - "@type": "@id" - }, - "dataset": { - "@type": "@id" - }, - "dateCreated": { - "@type": "Date" - }, - "dateIssued": { - "@type": "DateTime" - }, - "dateModified": { - "@type": "Date" - }, - "datePosted": { - "@type": "Date" - }, - "datePublished": { - "@type": "Date" - }, - "dayOfWeek": { - "@type": "@id" - }, - "deathDate": { - "@type": "Date" - }, - "deliveryAddress": { - "@type": "@id" - }, - "deliveryLeadTime": { - "@type": "@id" - }, - "deliveryMethod": { - "@type": "@id" - }, - "deliveryStatus": { - "@type": "@id" - }, - "department": { - "@type": "@id" - }, - "departureAirport": { - "@type": "@id" - }, - "departureBusStop": { - "@type": "@id" - }, - 
"departureStation": { - "@type": "@id" - }, - "departureTime": { - "@type": "DateTime" - }, - "depth": { - "@type": "@id" - }, - "diagnosis": { - "@type": "@id" - }, - "diagram": { - "@type": "@id" - }, - "diet": { - "@type": "@id" - }, - "differentialDiagnosis": { - "@type": "@id" - }, - "director": { - "@type": "@id" - }, - "directors": { - "@type": "@id" - }, - "discusses": { - "@type": "@id" - }, - "discussionUrl": { - "@type": "@id" - }, - "dissolutionDate": { - "@type": "Date" - }, - "distance": { - "@type": "@id" - }, - "distinguishingSign": { - "@type": "@id" - }, - "distribution": { - "@type": "@id" - }, - "domainIncludes": { - "@type": "@id" - }, - "doorTime": { - "@type": "DateTime" - }, - "doseSchedule": { - "@type": "@id" - }, - "doseValue": { - "@type": "Number" - }, - "downloadUrl": { - "@type": "@id" - }, - "downvoteCount": { - "@type": "Integer" - }, - "drainsTo": { - "@type": "@id" - }, - "dropoffLocation": { - "@type": "@id" - }, - "dropoffTime": { - "@type": "DateTime" - }, - "drug": { - "@type": "@id" - }, - "drugClass": { - "@type": "@id" - }, - "duplicateTherapy": { - "@type": "@id" - }, - "duration": { - "@type": "Duration" - }, - "durationOfWarranty": { - "@type": "@id" - }, - "editor": { - "@type": "@id" - }, - "educationalAlignment": { - "@type": "@id" - }, - "eligibleCustomerType": { - "@type": "@id" - }, - "eligibleDuration": { - "@type": "@id" - }, - "eligibleQuantity": { - "@type": "@id" - }, - "eligibleTransactionVolume": { - "@type": "@id" - }, - "embedUrl": { - "@type": "@id" - }, - "employee": { - "@type": "@id" - }, - "employees": { - "@type": "@id" - }, - "encodesCreativeWork": { - "@type": "@id" - }, - "encoding": { - "@type": "@id" - }, - "encodings": { - "@type": "@id" - }, - "endDate": { - "@type": "Date" - }, - "endTime": { - "@type": "DateTime" - }, - "endorsee": { - "@type": "@id" - }, - "endorsers": { - "@type": "@id" - }, - "entertainmentBusiness": { - "@type": "@id" - }, - "episode": { - "@type": "@id" - }, - 
"episodeNumber": { - "@type": "Integer" - }, - "episodes": { - "@type": "@id" - }, - "equal": { - "@type": "@id" - }, - "estimatesRiskOf": { - "@type": "@id" - }, - "event": { - "@type": "@id" - }, - "eventStatus": { - "@type": "@id" - }, - "events": { - "@type": "@id" - }, - "evidenceLevel": { - "@type": "@id" - }, - "exercisePlan": { - "@type": "@id" - }, - "expectedArrivalFrom": { - "@type": "DateTime" - }, - "expectedArrivalUntil": { - "@type": "DateTime" - }, - "expires": { - "@type": "Date" - }, - "fatContent": { - "@type": "@id" - }, - "fiberContent": { - "@type": "@id" - }, - "fileSize": { - "@type": "Integer" - }, - "followee": { - "@type": "@id" - }, - "follows": { - "@type": "@id" - }, - "foodEstablishment": { - "@type": "@id" - }, - "foodEvent": { - "@type": "@id" - }, - "founder": { - "@type": "@id" - }, - "founders": { - "@type": "@id" - }, - "foundingDate": { - "@type": "Date" - }, - "free": { - "@type": "Boolean" - }, - "fromLocation": { - "@type": "Number" - }, - "geo": { - "@type": "@id" - }, - "geographicArea": { - "@type": "@id" - }, - "greater": { - "@type": "@id" - }, - "greaterOrEqual": { - "@type": "@id" - }, - "guideline": { - "@type": "@id" - }, - "guidelineDate": { - "@type": "Date" - }, - "guidelineSubject": { - "@type": "@id" - }, - "hasDeliveryMethod": { - "@type": "@id" - }, - "hasPOS": { - "@type": "@id" - }, - "healthCondition": { - "@type": "@id" - }, - "height": { - "@type": "@id" - }, - "hiringOrganization": { - "@type": "@id" - }, - "homeLocation": { - "@type": "@id" - }, - "hospitalAffiliation": { - "@type": "@id" - }, - "hostingOrganization": { - "@type": "@id" - }, - "hoursAvailable": { - "@type": "@id" - }, - "identifyingExam": { - "@type": "@id" - }, - "identifyingTest": { - "@type": "@id" - }, - "illustrator": { - "@type": "@id" - }, - "image": { - "@type": "@id" - }, - "imagingTechnique": { - "@type": "@id" - }, - "inAlbum": { - "@type": "@id" - }, - "inPlaylist": { - "@type": "@id" - }, - "includedRiskFactor": { - 
"@type": "@id" - }, - "includesObject": { - "@type": "@id" - }, - "increasesRiskOf": { - "@type": "@id" - }, - "indication": { - "@type": "@id" - }, - "infectiousAgentClass": { - "@type": "@id" - }, - "insertion": { - "@type": "@id" - }, - "installUrl": { - "@type": "@id" - }, - "instrument": { - "@type": "@id" - }, - "interactingDrug": { - "@type": "@id" - }, - "inventoryLevel": { - "@type": "@id" - }, - "inverseOf": { - "@type": "@id" - }, - "isAccessoryOrSparePartFor": { - "@type": "@id" - }, - "isAvailableGenerically": { - "@type": "Boolean" - }, - "isBasedOnUrl": { - "@type": "@id" - }, - "isConsumableFor": { - "@type": "@id" - }, - "isFamilyFriendly": { - "@type": "Boolean" - }, - "isGift": { - "@type": "Boolean" - }, - "isPartOf": { - "@type": "@id" - }, - "isProprietary": { - "@type": "Boolean" - }, - "isRelatedTo": { - "@type": "@id" - }, - "isSimilarTo": { - "@type": "@id" - }, - "isVariantOf": { - "@type": "@id" - }, - "issuedBy": { - "@type": "@id" - }, - "issuedThrough": { - "@type": "@id" - }, - "itemCondition": { - "@type": "@id" - }, - "itemOffered": { - "@type": "@id" - }, - "itemReviewed": { - "@type": "@id" - }, - "itemShipped": { - "@type": "@id" - }, - "jobLocation": { - "@type": "@id" - }, - "knows": { - "@type": "@id" - }, - "labelDetails": { - "@type": "@id" - }, - "landlord": { - "@type": "@id" - }, - "language": { - "@type": "@id" - }, - "lastReviewed": { - "@type": "Date" - }, - "legalStatus": { - "@type": "@id" - }, - "lender": { - "@type": "@id" - }, - "lesser": { - "@type": "@id" - }, - "lesserOrEqual": { - "@type": "@id" - }, - "license": { - "@type": "@id" - }, - "location": { - "@type": "@id" - }, - "logo": { - "@type": "@id" - }, - "loser": { - "@type": "@id" - }, - "mainContentOfPage": { - "@type": "@id" - }, - "makesOffer": { - "@type": "@id" - }, - "manufacturer": { - "@type": "@id" - }, - "map": { - "@type": "@id" - }, - "maps": { - "@type": "@id" - }, - "maxPrice": { - "@type": "Number" - }, - "maxValue": { - "@type": "Number" 
- }, - "maximumIntake": { - "@type": "@id" - }, - "medicalSpecialty": { - "@type": "@id" - }, - "medicineSystem": { - "@type": "@id" - }, - "member": { - "@type": "@id" - }, - "memberOf": { - "@type": "@id" - }, - "members": { - "@type": "@id" - }, - "mentions": { - "@type": "@id" - }, - "merchant": { - "@type": "@id" - }, - "minPrice": { - "@type": "Number" - }, - "minValue": { - "@type": "Number" - }, - "modifiedTime": { - "@type": "DateTime" - }, - "multipleValues": { - "@type": "Boolean" - }, - "musicBy": { - "@type": "@id" - }, - "musicGroupMember": { - "@type": "@id" - }, - "nationality": { - "@type": "@id" - }, - "nerve": { - "@type": "@id" - }, - "nerveMotor": { - "@type": "@id" - }, - "nonEqual": { - "@type": "@id" - }, - "numAdults": { - "@type": "Number" - }, - "numChildren": { - "@type": "Number" - }, - "numTracks": { - "@type": "Integer" - }, - "numberOfEpisodes": { - "@type": "Number" - }, - "numberOfPages": { - "@type": "Integer" - }, - "numberOfSeasons": { - "@type": "Number" - }, - "numberofEmployees": { - "@type": "@id" - }, - "nutrition": { - "@type": "@id" - }, - "object": { - "@type": "@id" - }, - "offerCount": { - "@type": "Integer" - }, - "offers": { - "@type": "@id" - }, - "openingHours": { - "@type": "Duration" - }, - "openingHoursSpecification": { - "@type": "@id" - }, - "opens": { - "@type": "Time" - }, - "opponent": { - "@type": "@id" - }, - "orderDate": { - "@type": "DateTime" - }, - "orderStatus": { - "@type": "@id" - }, - "orderedItem": { - "@type": "@id" - }, - "origin": { - "@type": "@id" - }, - "originAddress": { - "@type": "@id" - }, - "originatesFrom": { - "@type": "@id" - }, - "owl": "http://www.w3.org/2002/07/owl#", - "owl:equivalentClass": { - "@type": "@vocab" - }, - "owl:equivalentProperty": { - "@type": "@vocab" - }, - "owl:imports": { - "@type": "@id" - }, - "owl:inverseOf": { - "@type": "@vocab" - }, - "owl:oneOf": { - "@type": "@vocab", - "@container": "@list" - }, - "owl:versionInfo": { - "@type": "xsd:string", - 
"@language": null - }, - "ownedFrom": { - "@type": "DateTime" - }, - "ownedThrough": { - "@type": "DateTime" - }, - "owns": { - "@type": "@id" - }, - "parent": { - "@type": "@id" - }, - "parentItem": { - "@type": "@id" - }, - "parentService": { - "@type": "@id" - }, - "parents": { - "@type": "@id" - }, - "partOfEpisode": { - "@type": "@id" - }, - "partOfOrder": { - "@type": "@id" - }, - "partOfSeason": { - "@type": "@id" - }, - "partOfSeries": { - "@type": "@id" - }, - "partOfSystem": { - "@type": "@id" - }, - "partOfTVSeries": { - "@type": "@id" - }, - "participant": { - "@type": "@id" - }, - "partySize": { - "@type": "Number" - }, - "paymentDue": { - "@type": "DateTime" - }, - "paymentMethod": { - "@type": "@id" - }, - "paymentUrl": { - "@type": "@id" - }, - "performer": { - "@type": "@id" - }, - "performerIn": { - "@type": "@id" - }, - "performers": { - "@type": "@id" - }, - "permitAudience": { - "@type": "@id" - }, - "photo": { - "@type": "@id" - }, - "photos": { - "@type": "@id" - }, - "pickupLocation": { - "@type": "@id" - }, - "pickupTime": { - "@type": "DateTime" - }, - "possibleTreatment": { - "@type": "@id" - }, - "potentialAction": { - "@type": "@id" - }, - "predecessorOf": { - "@type": "@id" - }, - "pregnancyCategory": { - "@type": "@id" - }, - "prepTime": { - "@type": "Duration" - }, - "prescribingInfo": { - "@type": "@id" - }, - "prescriptionStatus": { - "@type": "@id" - }, - "previousStartDate": { - "@type": "Date" - }, - "priceSpecification": { - "@type": "@id" - }, - "priceValidUntil": { - "@type": "Date" - }, - "primaryImageOfPage": { - "@type": "@id" - }, - "primaryPrevention": { - "@type": "@id" - }, - "procedureType": { - "@type": "@id" - }, - "processingTime": { - "@type": "Duration" - }, - "producer": { - "@type": "@id" - }, - "produces": { - "@type": "@id" - }, - "productionCompany": { - "@type": "@id" - }, - "programMembershipUsed": { - "@type": "@id" - }, - "programmingLanguage": { - "@type": "@id" - }, - "proteinContent": { - "@type": 
"@id" - }, - "provider": { - "@type": "@id" - }, - "providesService": { - "@type": "@id" - }, - "publication": { - "@type": "@id" - }, - "publishedOn": { - "@type": "@id" - }, - "publisher": { - "@type": "@id" - }, - "publishingPrinciples": { - "@type": "@id" - }, - "purpose": { - "@type": "@id" - }, - "rangeIncludes": { - "@type": "@id" - }, - "ratingCount": { - "@type": "Number" - }, - "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", - "rdfs": "http://www.w3.org/2000/01/rdf-schema#", - "rdfs:domain": { - "@type": "@id" - }, - "rdfs:range": { - "@type": "@id" - }, - "rdfs:subClassOf": { - "@type": "@id" - }, - "rdfs:subPropertyOf": { - "@type": "@id" - }, - "readonlyValue": { - "@type": "Boolean" - }, - "realEstateAgent": { - "@type": "@id" - }, - "recipe": { - "@type": "@id" - }, - "recipient": { - "@type": "@id" - }, - "recognizingAuthority": { - "@type": "@id" - }, - "recommendedIntake": { - "@type": "@id" - }, - "regionDrained": { - "@type": "@id" - }, - "regionsAllowed": { - "@type": "@id" - }, - "relatedAnatomy": { - "@type": "@id" - }, - "relatedCondition": { - "@type": "@id" - }, - "relatedDrug": { - "@type": "@id" - }, - "relatedLink": { - "@type": "@id" - }, - "relatedStructure": { - "@type": "@id" - }, - "relatedTherapy": { - "@type": "@id" - }, - "relatedTo": { - "@type": "@id" - }, - "releaseDate": { - "@type": "Date" - }, - "relevantSpecialty": { - "@type": "@id" - }, - "repetitions": { - "@type": "Number" - }, - "replacee": { - "@type": "@id" - }, - "replacer": { - "@type": "@id" - }, - "replyToUrl": { - "@type": "@id" - }, - "representativeOfPage": { - "@type": "Boolean" - }, - "requiredMaxAge": { - "@type": "Integer" - }, - "requiredMinAge": { - "@type": "Integer" - }, - "requiresSubscription": { - "@type": "Boolean" - }, - "reservationFor": { - "@type": "@id" - }, - "reservationStatus": { - "@type": "@id" - }, - "reservedTicket": { - "@type": "@id" - }, - "result": { - "@type": "@id" - }, - "resultReview": { - "@type": "@id" - }, - "review": 
{ - "@type": "@id" - }, - "reviewCount": { - "@type": "Number" - }, - "reviewRating": { - "@type": "@id" - }, - "reviewedBy": { - "@type": "@id" - }, - "reviews": { - "@type": "@id" - }, - "riskFactor": { - "@type": "@id" - }, - "runsTo": { - "@type": "@id" - }, - "sameAs": { - "@type": "@id" - }, - "saturatedFatContent": { - "@type": "@id" - }, - "scheduledTime": { - "@type": "DateTime" - }, - "schema": "http://schema.org/", - "screenshot": { - "@type": "@id" - }, - "season": { - "@type": "@id" - }, - "seasonNumber": { - "@type": "Integer" - }, - "seasons": { - "@type": "@id" - }, - "secondaryPrevention": { - "@type": "@id" - }, - "seeks": { - "@type": "@id" - }, - "seller": { - "@type": "@id" - }, - "sender": { - "@type": "@id" - }, - "sensoryUnit": { - "@type": "@id" - }, - "seriousAdverseOutcome": { - "@type": "@id" - }, - "serviceArea": { - "@type": "@id" - }, - "serviceAudience": { - "@type": "@id" - }, - "serviceLocation": { - "@type": "@id" - }, - "serviceOperator": { - "@type": "@id" - }, - "servicePhone": { - "@type": "@id" - }, - "servicePostalAddress": { - "@type": "@id" - }, - "serviceSmsNumber": { - "@type": "@id" - }, - "serviceUrl": { - "@type": "@id" - }, - "sibling": { - "@type": "@id" - }, - "siblings": { - "@type": "@id" - }, - "signDetected": { - "@type": "@id" - }, - "signOrSymptom": { - "@type": "@id" - }, - "significantLink": { - "@type": "@id" - }, - "significantLinks": { - "@type": "@id" - }, - "sodiumContent": { - "@type": "@id" - }, - "source": { - "@type": "@id" - }, - "sourceOrganization": { - "@type": "@id" - }, - "source_ActionCollabClass": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_ActionCollabClass", - "source_DatasetClass": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_DatasetClass", - "source_GoodRelationsClass": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_GoodRelationsClass", - "source_GoodRelationsProperties": 
"http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_GoodRelationsProperties", - "source_LRMIClass": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_LRMIClass", - "source_QAStackExchange": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_QAStackExchange", - "source_WikiDoc": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc", - "source_rNews": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_rNews", - "sourcedFrom": { - "@type": "@id" - }, - "spatial": { - "@type": "@id" - }, - "specialty": { - "@type": "@id" - }, - "sponsor": { - "@type": "@id" - }, - "sportsActivityLocation": { - "@type": "@id" - }, - "sportsEvent": { - "@type": "@id" - }, - "sportsTeam": { - "@type": "@id" - }, - "spouse": { - "@type": "@id" - }, - "stage": { - "@type": "@id" - }, - "stageAsNumber": { - "@type": "Number" - }, - "startDate": { - "@type": "Date" - }, - "startTime": { - "@type": "DateTime" - }, - "status": { - "@type": "@id" - }, - "stepValue": { - "@type": "Number" - }, - "strengthValue": { - "@type": "Number" - }, - "study": { - "@type": "@id" - }, - "studyDesign": { - "@type": "@id" - }, - "studyLocation": { - "@type": "@id" - }, - "studySubject": { - "@type": "@id" - }, - "subEvent": { - "@type": "@id" - }, - "subEvents": { - "@type": "@id" - }, - "subOrganization": { - "@type": "@id" - }, - "subReservation": { - "@type": "@id" - }, - "subStructure": { - "@type": "@id" - }, - "subTest": { - "@type": "@id" - }, - "successorOf": { - "@type": "@id" - }, - "sugarContent": { - "@type": "@id" - }, - "suggestedAnswer": { - "@type": "@id" - }, - "suggestedMaxAge": { - "@type": "Number" - }, - "suggestedMinAge": { - "@type": "Number" - }, - "superEvent": { - "@type": "@id" - }, - "supercededBy": { - "@type": "@id" - }, - "supplyTo": { - "@type": "@id" - }, - "target": { - "@type": "@id" - }, - "targetProduct": { - "@type": "@id" - }, - "targetUrl": { - "@type": "@id" - }, - "temporal": { - "@type": "DateTime" - }, - 
"thumbnail": { - "@type": "@id" - }, - "thumbnailUrl": { - "@type": "@id" - }, - "ticketedSeat": { - "@type": "@id" - }, - "timeRequired": { - "@type": "Duration" - }, - "toLocation": { - "@type": "Number" - }, - "totalTime": { - "@type": "Duration" - }, - "track": { - "@type": "@id" - }, - "trackingUrl": { - "@type": "@id" - }, - "tracks": { - "@type": "@id" - }, - "trailer": { - "@type": "@id" - }, - "transFatContent": { - "@type": "@id" - }, - "trialDesign": { - "@type": "@id" - }, - "tributary": { - "@type": "@id" - }, - "typeOfGood": { - "@type": "@id" - }, - "typicalTest": { - "@type": "@id" - }, - "underName": { - "@type": "@id" - }, - "unsaturatedFatContent": { - "@type": "@id" - }, - "uploadDate": { - "@type": "Date" - }, - "upvoteCount": { - "@type": "Integer" - }, - "url": { - "@type": "@id" - }, - "usedToDiagnose": { - "@type": "@id" - }, - "usesDevice": { - "@type": "@id" - }, - "validFor": { - "@type": "Duration" - }, - "validFrom": { - "@type": "DateTime" - }, - "validIn": { - "@type": "@id" - }, - "validThrough": { - "@type": "DateTime" - }, - "validUntil": { - "@type": "Date" - }, - "value": { - "@type": "Number" - }, - "valueAddedTaxIncluded": { - "@type": "Boolean" - }, - "valueMaxLength": { - "@type": "Number" - }, - "valueMinLength": { - "@type": "Number" - }, - "valuePattern": { - "@type": "Number" - }, - "valueReference": { - "@type": "@id" - }, - "valueRequired": { - "@type": "Boolean" - }, - "vendor": { - "@type": "@id" - }, - "version": { - "@type": "Number" - }, - "video": { - "@type": "@id" - }, - "warranty": { - "@type": "@id" - }, - "warrantyPromise": { - "@type": "@id" - }, - "warrantyScope": { - "@type": "@id" - }, - "webCheckinTime": { - "@type": "DateTime" - }, - "weight": { - "@type": "@id" - }, - "width": { - "@type": "@id" - }, - "winner": { - "@type": "@id" - }, - "wordCount": { - "@type": "Integer" - }, - "workLocation": { - "@type": "@id" - }, - "workPerformed": { - "@type": "@id" - }, - "workload": { - "@type": "@id" - }, - 
"worksFor": { - "@type": "@id" - }, - "yearlyRevenue": { - "@type": "@id" - }, - "yearsInOperation": { - "@type": "@id" - } - }, - { - "children": { - "@reverse": "rdfs:subClassOf" - }, - "properties": { - "@reverse": "schema:domainIncludes" - } - } - ], - "@graph": [ - { - "@id": "schema:Thing", - "@type": "rdfs:Class", - "rdfs:comment": "The most generic type of item.", - "rdfs:label": "Thing", - "children": [ - { - "@id": "schema:Action", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_ActionCollabClass" - }, - "rdfs:comment": "An action performed by a direct agent and indirect participants upon a direct object. Optionally happens at a location with the help of an inanimate instrument. The execution of the action may produce a result. Specific action sub-type documentation specifies the exact expectation of each argument/role.", - "rdfs:label": "Action", - "rdfs:subClassOf": "schema:Thing", - "children": [ - { - "@id": "schema:AchieveAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of accomplishing something via previous efforts. It is an instantaneous action rather than an ongoing process.", - "rdfs:label": "AchieveAction", - "rdfs:subClassOf": "schema:Action", - "children": [ - { - "@id": "schema:LoseAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of being defeated in a competitive activity.", - "rdfs:label": "LoseAction", - "rdfs:subClassOf": "schema:AchieveAction", - "properties": [ - { - "@id": "schema:winner", - "@type": "rdf:Property", - "domainIncludes": "schema:LoseAction", - "rangeIncludes": "schema:Person", - "rdfs:comment": "A sub property of participant. 
The winner of the action.", - "rdfs:label": "winner", - "rdfs:subPropertyOf": "schema:participant" - } - ] - }, - { - "@id": "schema:TieAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of reaching a draw in a competitive activity.", - "rdfs:label": "TieAction", - "rdfs:subClassOf": "schema:AchieveAction" - }, - { - "@id": "schema:WinAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of achieving victory in a competitive activity.", - "rdfs:label": "WinAction", - "rdfs:subClassOf": "schema:AchieveAction", - "properties": [ - { - "@id": "schema:loser", - "@type": "rdf:Property", - "domainIncludes": "schema:WinAction", - "rangeIncludes": "schema:Person", - "rdfs:comment": "A sub property of participant. The loser of the action.", - "rdfs:label": "loser", - "rdfs:subPropertyOf": "schema:participant" - } - ] - } - ] - }, - { - "@id": "schema:AssessAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of forming one's opinion, reaction or sentiment.", - "rdfs:label": "AssessAction", - "rdfs:subClassOf": "schema:Action", - "children": [ - { - "@id": "schema:ChooseAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of expressing a preference from a set of options or a large or unbounded set of choices/options.", - "rdfs:label": "ChooseAction", - "rdfs:subClassOf": "schema:AssessAction", - "children": [ - { - "@id": "schema:VoteAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of expressing a preference from a fixed/finite/structured set of choices/options.", - "rdfs:label": "VoteAction", - "rdfs:subClassOf": "schema:ChooseAction", - "properties": [ - { - "@id": "schema:candidate", - "@type": "rdf:Property", - "domainIncludes": "schema:VoteAction", - "rangeIncludes": "schema:Person", - "rdfs:comment": "A sub property of object. 
The candidate subject of this action.", - "rdfs:label": "candidate", - "rdfs:subPropertyOf": "schema:object" - } - ] - } - ], - "properties": [ - { - "@id": "schema:option", - "@type": "rdf:Property", - "domainIncludes": "schema:ChooseAction", - "rangeIncludes": [ - "schema:Text", - "schema:Thing" - ], - "rdfs:comment": "A sub property of object. The options subject to this action.", - "rdfs:label": "option", - "rdfs:subPropertyOf": "schema:object" - } - ] - }, - { - "@id": "schema:IgnoreAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of intentionally disregarding the object. An agent ignores an object.", - "rdfs:label": "IgnoreAction", - "rdfs:subClassOf": "schema:AssessAction" - }, - { - "@id": "schema:ReactAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of responding instinctively and emotionally to an object, expressing a sentiment.", - "rdfs:label": "ReactAction", - "rdfs:subClassOf": "schema:AssessAction", - "children": [ - { - "@id": "schema:AgreeAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of expressing a consistency of opinion with the object. An agent agrees to/about an object (a proposition, topic or theme) with participants.", - "rdfs:label": "AgreeAction", - "rdfs:subClassOf": "schema:ReactAction" - }, - { - "@id": "schema:DisagreeAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of expressing a difference of opinion with the object. An agent disagrees to/about an object (a proposition, topic or theme) with participants.", - "rdfs:label": "DisagreeAction", - "rdfs:subClassOf": "schema:ReactAction" - }, - { - "@id": "schema:DislikeAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of expressing a negative sentiment about the object. 
An agent dislikes an object (a proposition, topic or theme) with participants.", - "rdfs:label": "DislikeAction", - "rdfs:subClassOf": "schema:ReactAction" - }, - { - "@id": "schema:EndorseAction", - "@type": "rdfs:Class", - "rdfs:comment": "An agent approves/certifies/likes/supports/sanction an object.", - "rdfs:label": "EndorseAction", - "rdfs:subClassOf": "schema:ReactAction", - "properties": [ - { - "@id": "schema:endorsee", - "@type": "rdf:Property", - "domainIncludes": "schema:EndorseAction", - "rangeIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "A sub property of participant. The person/organization being supported.", - "rdfs:label": "endorsee", - "rdfs:subPropertyOf": "schema:participant" - } - ] - }, - { - "@id": "schema:LikeAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of expressing a positive sentiment about the object. An agent likes an object (a proposition, topic or theme) with participants.", - "rdfs:label": "LikeAction", - "rdfs:subClassOf": "schema:ReactAction" - }, - { - "@id": "schema:WantAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of expressing a desire about the object. An agent wants an object.", - "rdfs:label": "WantAction", - "rdfs:subClassOf": "schema:ReactAction" - } - ] - }, - { - "@id": "schema:ReviewAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of producing a balanced opinion about the object for an audience. An agent reviews an object with participants resulting in a review.", - "rdfs:label": "ReviewAction", - "rdfs:subClassOf": "schema:AssessAction", - "properties": [ - { - "@id": "schema:resultReview", - "@type": "rdf:Property", - "domainIncludes": "schema:ReviewAction", - "rangeIncludes": "schema:Review", - "rdfs:comment": "A sub property of result. 
The review that resulted in the performing of the action.", - "rdfs:label": "resultReview", - "rdfs:subPropertyOf": "schema:result" - } - ] - } - ] - }, - { - "@id": "schema:ConsumeAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of ingesting information/resources/food.", - "rdfs:label": "ConsumeAction", - "rdfs:subClassOf": "schema:Action", - "children": [ - { - "@id": "schema:DrinkAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of swallowing liquids.", - "rdfs:label": "DrinkAction", - "rdfs:subClassOf": "schema:ConsumeAction" - }, - { - "@id": "schema:EatAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of swallowing solid objects.", - "rdfs:label": "EatAction", - "rdfs:subClassOf": "schema:ConsumeAction" - }, - { - "@id": "schema:InstallAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of installing an application.", - "rdfs:label": "InstallAction", - "rdfs:subClassOf": "schema:ConsumeAction" - }, - { - "@id": "schema:ListenAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of consuming audio content.", - "rdfs:label": "ListenAction", - "rdfs:subClassOf": "schema:ConsumeAction" - }, - { - "@id": "schema:ReadAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of consuming written content.", - "rdfs:label": "ReadAction", - "rdfs:subClassOf": "schema:ConsumeAction" - }, - { - "@id": "schema:UseAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of applying an object to its intended purpose.", - "rdfs:label": "UseAction", - "rdfs:subClassOf": "schema:ConsumeAction", - "children": [ - { - "@id": "schema:WearAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of dressing oneself in clothing.", - "rdfs:label": "WearAction", - "rdfs:subClassOf": "schema:UseAction" - } - ] - }, - { - "@id": "schema:ViewAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of consuming static visual content.", - "rdfs:label": "ViewAction", - "rdfs:subClassOf": "schema:ConsumeAction" - }, - { - 
"@id": "schema:WatchAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of consuming dynamic/moving visual content.", - "rdfs:label": "WatchAction", - "rdfs:subClassOf": "schema:ConsumeAction" - } - ] - }, - { - "@id": "schema:CreateAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of deliberately creating/producing/generating/building a result out of the agent.", - "rdfs:label": "CreateAction", - "rdfs:subClassOf": "schema:Action", - "children": [ - { - "@id": "schema:CookAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of producing/preparing food.", - "rdfs:label": "CookAction", - "rdfs:subClassOf": "schema:CreateAction", - "properties": [ - { - "@id": "schema:foodEstablishment", - "@type": "rdf:Property", - "domainIncludes": "schema:CookAction", - "rangeIncludes": [ - "schema:FoodEstablishment", - "schema:Place" - ], - "rdfs:comment": "A sub property of location. The specific food establishment where the action occurred.", - "rdfs:label": "foodEstablishment", - "rdfs:subPropertyOf": "schema:location" - }, - { - "@id": "schema:foodEvent", - "@type": "rdf:Property", - "domainIncludes": "schema:CookAction", - "rangeIncludes": "schema:FoodEvent", - "rdfs:comment": "A sub property of location. The specific food event where the action occurred.", - "rdfs:label": "foodEvent", - "rdfs:subPropertyOf": "schema:location" - }, - { - "@id": "schema:recipe", - "@type": "rdf:Property", - "domainIncludes": "schema:CookAction", - "rangeIncludes": "schema:Recipe", - "rdfs:comment": "A sub property of instrument. 
The recipe/instructions used to perform the action.", - "rdfs:label": "recipe", - "rdfs:subPropertyOf": "schema:instrument" - } - ] - }, - { - "@id": "schema:DrawAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of producing a visual/graphical representation of an object, typically with a pen/pencil and paper as instruments.", - "rdfs:label": "DrawAction", - "rdfs:subClassOf": "schema:CreateAction" - }, - { - "@id": "schema:FilmAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of capturing sound and moving images on film, video, or digitally.", - "rdfs:label": "FilmAction", - "rdfs:subClassOf": "schema:CreateAction" - }, - { - "@id": "schema:PaintAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of producing a painting, typically with paint and canvas as instruments.", - "rdfs:label": "PaintAction", - "rdfs:subClassOf": "schema:CreateAction" - }, - { - "@id": "schema:PhotographAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of capturing still images of objects using a camera.", - "rdfs:label": "PhotographAction", - "rdfs:subClassOf": "schema:CreateAction" - }, - { - "@id": "schema:WriteAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of authoring written creative content.", - "rdfs:label": "WriteAction", - "rdfs:subClassOf": "schema:CreateAction", - "properties": [ - { - "@id": "schema:language", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CommunicateAction", - "schema:WriteAction" - ], - "rangeIncludes": "schema:Language", - "rdfs:comment": "A sub property of instrument. The language used on this action.", - "rdfs:label": "language", - "rdfs:subPropertyOf": "schema:instrument" - } - ] - } - ] - }, - { - "@id": "schema:FindAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of finding an object.

Related actions:

", - "rdfs:label": "FindAction", - "rdfs:subClassOf": "schema:Action", - "children": [ - { - "@id": "schema:CheckAction", - "@type": "rdfs:Class", - "rdfs:comment": "An agent inspects/determines/investigates/inquire or examine an object's accuracy/quality/condition or state.", - "rdfs:label": "CheckAction", - "rdfs:subClassOf": "schema:FindAction" - }, - { - "@id": "schema:DiscoverAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of discovering/finding an object.", - "rdfs:label": "DiscoverAction", - "rdfs:subClassOf": "schema:FindAction" - }, - { - "@id": "schema:TrackAction", - "@type": "rdfs:Class", - "rdfs:comment": "An agent tracks an object for updates.

Related actions:

", - "rdfs:label": "TrackAction", - "rdfs:subClassOf": "schema:FindAction", - "properties": [ - { - "@id": "schema:deliveryMethod", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:ReceiveAction", - "schema:SendAction", - "schema:TrackAction" - ], - "rangeIncludes": "schema:DeliveryMethod", - "rdfs:comment": "A sub property of instrument. The method of delivery", - "rdfs:label": "deliveryMethod", - "rdfs:subPropertyOf": "schema:instrument" - } - ] - } - ] - }, - { - "@id": "schema:InteractAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of interacting with another person or organization.", - "rdfs:label": "InteractAction", - "rdfs:subClassOf": "schema:Action", - "children": [ - { - "@id": "schema:BefriendAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of forming a personal connection with someone (object) mutually/bidirectionally/symmetrically.

Related actions:

", - "rdfs:label": "BefriendAction", - "rdfs:subClassOf": "schema:InteractAction" - }, - { - "@id": "schema:CommunicateAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of conveying information to another person via a communication medium (instrument) such as speech, email, or telephone conversation.", - "rdfs:label": "CommunicateAction", - "rdfs:subClassOf": "schema:InteractAction", - "children": [ - { - "@id": "schema:AskAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of posing a question / favor to someone.

Related actions:

", - "rdfs:label": "AskAction", - "rdfs:subClassOf": "schema:CommunicateAction", - "properties": [ - { - "@id": "schema:question", - "@type": "rdf:Property", - "domainIncludes": "schema:AskAction", - "rangeIncludes": "schema:Text", - "rdfs:comment": "A sub property of object. A question.", - "rdfs:label": "question", - "rdfs:subPropertyOf": "schema:object" - } - ] - }, - { - "@id": "schema:CheckInAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of an agent communicating (service provider, social media, etc) their arrival by registering/confirming for a previously reserved service (e.g. flight check in) or at a place (e.g. hotel), possibly resulting in a result (boarding pass, etc).

Related actions:

", - "rdfs:label": "CheckInAction", - "rdfs:subClassOf": "schema:CommunicateAction" - }, - { - "@id": "schema:CheckOutAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of an agent communicating (service provider, social media, etc) their departure of a previously reserved service (e.g. flight check in) or place (e.g. hotel).

Related actions:

", - "rdfs:label": "CheckOutAction", - "rdfs:subClassOf": "schema:CommunicateAction" - }, - { - "@id": "schema:CommentAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of generating a comment about a subject.", - "rdfs:label": "CommentAction", - "rdfs:subClassOf": "schema:CommunicateAction" - }, - { - "@id": "schema:InformAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of notifying someone of information pertinent to them, with no expectation of a response.", - "rdfs:label": "InformAction", - "rdfs:subClassOf": "schema:CommunicateAction", - "children": [ - { - "@id": "schema:ConfirmAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of notifying someone that a future event/action is going to happen as expected.

Related actions:

", - "rdfs:label": "ConfirmAction", - "rdfs:subClassOf": "schema:InformAction" - }, - { - "@id": "schema:RsvpAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of notifying an event organizer as to whether you expect to attend the event.", - "rdfs:label": "RsvpAction", - "rdfs:subClassOf": "schema:InformAction" - } - ], - "properties": [ - { - "@id": "schema:event", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place", - "schema:InformAction", - "schema:PlayAction", - "schema:InviteAction", - "schema:JoinAction", - "schema:LeaveAction" - ], - "rangeIncludes": "schema:Event", - "rdfs:comment": "Upcoming or past event associated with this place or organization.", - "rdfs:label": "event" - } - ] - }, - { - "@id": "schema:InviteAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of asking someone to attend an event. Reciprocal of RsvpAction.", - "rdfs:label": "InviteAction", - "rdfs:subClassOf": "schema:CommunicateAction", - "properties": [ - { - "@id": "schema:event", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place", - "schema:InformAction", - "schema:PlayAction", - "schema:InviteAction", - "schema:JoinAction", - "schema:LeaveAction" - ], - "rangeIncludes": "schema:Event", - "rdfs:comment": "Upcoming or past event associated with this place or organization.", - "rdfs:label": "event" - } - ] - }, - { - "@id": "schema:ReplyAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of responding to a question/message asked/sent by the object. Related to AskAction.

Related actions:

", - "rdfs:label": "ReplyAction", - "rdfs:subClassOf": "schema:CommunicateAction" - }, - { - "@id": "schema:ShareAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of distributing content to people for their amusement or edification.", - "rdfs:label": "ShareAction", - "rdfs:subClassOf": "schema:CommunicateAction" - } - ], - "properties": [ - { - "@id": "schema:about", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:CommunicateAction" - ], - "rangeIncludes": "schema:Thing", - "rdfs:comment": "The subject matter of the content.", - "rdfs:label": "about" - }, - { - "@id": "schema:language", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CommunicateAction", - "schema:WriteAction" - ], - "rangeIncludes": "schema:Language", - "rdfs:comment": "A sub property of instrument. The language used on this action.", - "rdfs:label": "language", - "rdfs:subPropertyOf": "schema:instrument" - }, - { - "@id": "schema:recipient", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CommunicateAction", - "schema:AuthorizeAction", - "schema:DonateAction", - "schema:GiveAction", - "schema:PayAction", - "schema:ReturnAction", - "schema:SendAction", - "schema:TipAction" - ], - "rangeIncludes": [ - "schema:Audience", - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "A sub property of participant. The participant who is at the receiving end of the action.", - "rdfs:label": "recipient", - "rdfs:subPropertyOf": "schema:participant" - } - ] - }, - { - "@id": "schema:FollowAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of forming a personal connection with someone/something (object) unidirectionally/asymmetrically to get updates polled from.

Related actions:

", - "rdfs:label": "FollowAction", - "rdfs:subClassOf": "schema:InteractAction", - "properties": [ - { - "@id": "schema:followee", - "@type": "rdf:Property", - "domainIncludes": "schema:FollowAction", - "rangeIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "A sub property of object. The person or organization being followed.", - "rdfs:label": "followee", - "rdfs:subPropertyOf": "schema:object" - } - ] - }, - { - "@id": "schema:JoinAction", - "@type": "rdfs:Class", - "rdfs:comment": "An agent joins an event/group with participants/friends at a location.

Related actions:

", - "rdfs:label": "JoinAction", - "rdfs:subClassOf": "schema:InteractAction", - "properties": [ - { - "@id": "schema:event", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place", - "schema:InformAction", - "schema:PlayAction", - "schema:InviteAction", - "schema:JoinAction", - "schema:LeaveAction" - ], - "rangeIncludes": "schema:Event", - "rdfs:comment": "Upcoming or past event associated with this place or organization.", - "rdfs:label": "event" - } - ] - }, - { - "@id": "schema:LeaveAction", - "@type": "rdfs:Class", - "rdfs:comment": "An agent leaves an event / group with participants/friends at a location.

Related actions:

", - "rdfs:label": "LeaveAction", - "rdfs:subClassOf": "schema:InteractAction", - "properties": [ - { - "@id": "schema:event", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place", - "schema:InformAction", - "schema:PlayAction", - "schema:InviteAction", - "schema:JoinAction", - "schema:LeaveAction" - ], - "rangeIncludes": "schema:Event", - "rdfs:comment": "Upcoming or past event associated with this place or organization.", - "rdfs:label": "event" - } - ] - }, - { - "@id": "schema:MarryAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of marrying a person.", - "rdfs:label": "MarryAction", - "rdfs:subClassOf": "schema:InteractAction" - }, - { - "@id": "schema:RegisterAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of registering to be a user of a service, product or web page.

Related actions:

", - "rdfs:label": "RegisterAction", - "rdfs:subClassOf": "schema:InteractAction" - }, - { - "@id": "schema:SubscribeAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of forming a personal connection with someone/something (object) unidirectionally/asymmetrically to get updates pushed to.

Related actions:

", - "rdfs:label": "SubscribeAction", - "rdfs:subClassOf": "schema:InteractAction" - }, - { - "@id": "schema:UnRegisterAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of un-registering from a service.

Related actions:

", - "rdfs:label": "UnRegisterAction", - "rdfs:subClassOf": "schema:InteractAction" - } - ] - }, - { - "@id": "schema:MoveAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of an agent relocating to a place.

Related actions:

", - "rdfs:label": "MoveAction", - "rdfs:subClassOf": "schema:Action", - "children": [ - { - "@id": "schema:ArriveAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of arriving at a place. An agent arrives at a destination from an fromLocation, optionally with participants.", - "rdfs:label": "ArriveAction", - "rdfs:subClassOf": "schema:MoveAction" - }, - { - "@id": "schema:DepartAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of departing from a place. An agent departs from an fromLocation for a destination, optionally with participants.", - "rdfs:label": "DepartAction", - "rdfs:subClassOf": "schema:MoveAction" - }, - { - "@id": "schema:TravelAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of traveling from an fromLocation to a destination by a specified mode of transport, optionally with participants.", - "rdfs:label": "TravelAction", - "rdfs:subClassOf": "schema:MoveAction", - "properties": [ - { - "@id": "schema:distance", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:ExerciseAction", - "schema:TravelAction" - ], - "rangeIncludes": "schema:Distance", - "rdfs:comment": "The distance travelled, e.g. exercising or travelling.", - "rdfs:label": "distance" - } - ] - } - ], - "properties": [ - { - "@id": "schema:fromLocation", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MoveAction", - "schema:TransferAction", - "schema:ExerciseAction" - ], - "rangeIncludes": [ - "schema:Number", - "schema:Place" - ], - "rdfs:comment": "A sub property of location. The original location of the object or the agent before the action.", - "rdfs:label": "fromLocation", - "rdfs:subPropertyOf": "schema:location" - }, - { - "@id": "schema:toLocation", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:InsertAction", - "schema:MoveAction", - "schema:TransferAction", - "schema:ExerciseAction" - ], - "rangeIncludes": [ - "schema:Number", - "schema:Place" - ], - "rdfs:comment": "A sub property of location. 
The final location of the object or the agent after the action.", - "rdfs:label": "toLocation", - "rdfs:subPropertyOf": "schema:location" - } - ] - }, - { - "@id": "schema:OrganizeAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of manipulating/administering/supervising/controlling one or more objects.", - "rdfs:label": "OrganizeAction", - "rdfs:subClassOf": "schema:Action", - "children": [ - { - "@id": "schema:AllocateAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of organizing tasks/objects/events by associating resources to it.", - "rdfs:label": "AllocateAction", - "rdfs:subClassOf": "schema:OrganizeAction", - "children": [ - { - "@id": "schema:AcceptAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of committing to/adopting an object.

Related actions:

", - "rdfs:label": "AcceptAction", - "rdfs:subClassOf": "schema:AllocateAction" - }, - { - "@id": "schema:AssignAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of allocating an action/event/task to some destination (someone or something).", - "rdfs:label": "AssignAction", - "rdfs:subClassOf": "schema:AllocateAction" - }, - { - "@id": "schema:AuthorizeAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of granting permission to an object.", - "rdfs:label": "AuthorizeAction", - "rdfs:subClassOf": "schema:AllocateAction", - "properties": [ - { - "@id": "schema:recipient", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CommunicateAction", - "schema:AuthorizeAction", - "schema:DonateAction", - "schema:GiveAction", - "schema:PayAction", - "schema:ReturnAction", - "schema:SendAction", - "schema:TipAction" - ], - "rangeIncludes": [ - "schema:Audience", - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "A sub property of participant. The participant who is at the receiving end of the action.", - "rdfs:label": "recipient", - "rdfs:subPropertyOf": "schema:participant" - } - ] - }, - { - "@id": "schema:RejectAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of rejecting to/adopting an object.

Related actions:

", - "rdfs:label": "RejectAction", - "rdfs:subClassOf": "schema:AllocateAction" - } - ], - "properties": [ - { - "@id": "schema:purpose", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MedicalDevice", - "schema:AllocateAction", - "schema:PayAction" - ], - "rangeIncludes": [ - "schema:MedicalDevicePurpose", - "schema:Thing" - ], - "rdfs:comment": "A goal towards an action is taken. Can be concrete or abstract.", - "rdfs:label": "purpose" - } - ] - }, - { - "@id": "schema:ApplyAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of registering to an organization/service without the guarantee to receive it. NOTE(goto): should this be under InteractAction instead?

Related actions:

", - "rdfs:label": "ApplyAction", - "rdfs:subClassOf": "schema:OrganizeAction" - }, - { - "@id": "schema:BookmarkAction", - "@type": "rdfs:Class", - "rdfs:comment": "An agent bookmarks/flags/labels/tags/marks an object.", - "rdfs:label": "BookmarkAction", - "rdfs:subClassOf": "schema:OrganizeAction" - }, - { - "@id": "schema:PlanAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of planning the execution of an event/task/action/reservation/plan to a future date.", - "rdfs:label": "PlanAction", - "rdfs:subClassOf": "schema:OrganizeAction", - "children": [ - { - "@id": "schema:CancelAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of asserting that a future event/action is no longer going to happen.

Related actions:

", - "rdfs:label": "CancelAction", - "rdfs:subClassOf": "schema:PlanAction" - }, - { - "@id": "schema:ReserveAction", - "@type": "rdfs:Class", - "rdfs:comment": "Reserving a concrete object.

Related actions:

", - "rdfs:label": "ReserveAction", - "rdfs:subClassOf": "schema:PlanAction", - "properties": [ - { - "@id": "schema:scheduledTime", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:PlanAction", - "schema:ReserveAction" - ], - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The time the object is scheduled to.", - "rdfs:label": "scheduledTime" - } - ] - }, - { - "@id": "schema:ScheduleAction", - "@type": "rdfs:Class", - "rdfs:comment": "Scheduling future actions, events, or tasks.

Related actions:

", - "rdfs:label": "ScheduleAction", - "rdfs:subClassOf": "schema:PlanAction" - } - ], - "properties": [ - { - "@id": "schema:scheduledTime", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:PlanAction", - "schema:ReserveAction" - ], - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The time the object is scheduled to.", - "rdfs:label": "scheduledTime" - } - ] - } - ] - }, - { - "@id": "schema:PlayAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of playing/exercising/training/performing for enjoyment, leisure, recreation, Competition or exercise.

Related actions:

", - "rdfs:label": "PlayAction", - "rdfs:subClassOf": "schema:Action", - "children": [ - { - "@id": "schema:ExerciseAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of participating in exertive activity for the purposes of improving health and fitness", - "rdfs:label": "ExerciseAction", - "rdfs:subClassOf": "schema:PlayAction", - "properties": [ - { - "@id": "schema:course", - "@type": "rdf:Property", - "domainIncludes": "schema:ExerciseAction", - "rangeIncludes": "schema:Place", - "rdfs:comment": "A sub property of location. The course where this action was taken.", - "rdfs:label": "course", - "rdfs:subPropertyOf": "schema:location" - }, - { - "@id": "schema:diet", - "@type": "rdf:Property", - "domainIncludes": "schema:ExerciseAction", - "rangeIncludes": "schema:Diet", - "rdfs:comment": "A sub property of instrument. The diet used in this action.", - "rdfs:label": "diet", - "rdfs:subPropertyOf": "schema:instrument" - }, - { - "@id": "schema:distance", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:ExerciseAction", - "schema:TravelAction" - ], - "rangeIncludes": "schema:Distance", - "rdfs:comment": "The distance travelled, e.g. exercising or travelling.", - "rdfs:label": "distance" - }, - { - "@id": "schema:exercisePlan", - "@type": "rdf:Property", - "domainIncludes": "schema:ExerciseAction", - "rangeIncludes": "schema:ExercisePlan", - "rdfs:comment": "A sub property of instrument. 
The exercise plan used on this action.", - "rdfs:label": "exercisePlan", - "rdfs:subPropertyOf": "schema:instrument" - }, - { - "@id": "schema:exerciseType", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:ExercisePlan", - "schema:ExerciseAction" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "Type(s) of exercise or activity, such as strength training, flexibility training, aerobics, cardiac rehabilitation, etc.", - "rdfs:label": "exerciseType" - }, - { - "@id": "schema:fromLocation", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MoveAction", - "schema:TransferAction", - "schema:ExerciseAction" - ], - "rangeIncludes": [ - "schema:Number", - "schema:Place" - ], - "rdfs:comment": "A sub property of location. The original location of the object or the agent before the action.", - "rdfs:label": "fromLocation", - "rdfs:subPropertyOf": "schema:location" - }, - { - "@id": "schema:opponent", - "@type": "rdf:Property", - "domainIncludes": "schema:ExerciseAction", - "rangeIncludes": "schema:Person", - "rdfs:comment": "A sub property of participant. The opponent on this action.", - "rdfs:label": "opponent", - "rdfs:subPropertyOf": "schema:participant" - }, - { - "@id": "schema:sportsActivityLocation", - "@type": "rdf:Property", - "domainIncludes": "schema:ExerciseAction", - "rangeIncludes": "schema:SportsActivityLocation", - "rdfs:comment": "A sub property of location. The sports activity location where this action occurred.", - "rdfs:label": "sportsActivityLocation", - "rdfs:subPropertyOf": "schema:location" - }, - { - "@id": "schema:sportsEvent", - "@type": "rdf:Property", - "domainIncludes": "schema:ExerciseAction", - "rangeIncludes": "schema:SportsEvent", - "rdfs:comment": "A sub property of location. 
The sports event where this action occurred.", - "rdfs:label": "sportsEvent", - "rdfs:subPropertyOf": "schema:location" - }, - { - "@id": "schema:sportsTeam", - "@type": "rdf:Property", - "domainIncludes": "schema:ExerciseAction", - "rangeIncludes": "schema:SportsTeam", - "rdfs:comment": "A sub property of participant. The sports team that participated on this action.", - "rdfs:label": "sportsTeam", - "rdfs:subPropertyOf": "schema:participant" - }, - { - "@id": "schema:toLocation", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:InsertAction", - "schema:MoveAction", - "schema:TransferAction", - "schema:ExerciseAction" - ], - "rangeIncludes": [ - "schema:Number", - "schema:Place" - ], - "rdfs:comment": "A sub property of location. The final location of the object or the agent after the action.", - "rdfs:label": "toLocation", - "rdfs:subPropertyOf": "schema:location" - } - ] - }, - { - "@id": "schema:PerformAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of participating in performance arts.", - "rdfs:label": "PerformAction", - "rdfs:subClassOf": "schema:PlayAction", - "properties": [ - { - "@id": "schema:entertainmentBusiness", - "@type": "rdf:Property", - "domainIncludes": "schema:PerformAction", - "rangeIncludes": "schema:EntertainmentBusiness", - "rdfs:comment": "A sub property of location. The entertainment business where the action occurred.", - "rdfs:label": "entertainmentBusiness", - "rdfs:subPropertyOf": "schema:location" - } - ] - } - ], - "properties": [ - { - "@id": "schema:audience", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Product", - "schema:PlayAction" - ], - "rangeIncludes": "schema:Audience", - "rdfs:comment": "The intended audience of the item, i.e. 
the group for whom the item was created.", - "rdfs:label": "audience" - }, - { - "@id": "schema:event", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place", - "schema:InformAction", - "schema:PlayAction", - "schema:InviteAction", - "schema:JoinAction", - "schema:LeaveAction" - ], - "rangeIncludes": "schema:Event", - "rdfs:comment": "Upcoming or past event associated with this place or organization.", - "rdfs:label": "event" - } - ] - }, - { - "@id": "schema:SearchAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of searching for an object.

Related actions:

", - "rdfs:label": "SearchAction", - "rdfs:subClassOf": "schema:Action", - "properties": [ - { - "@id": "schema:query", - "@type": "rdf:Property", - "domainIncludes": "schema:SearchAction", - "rangeIncludes": [ - "schema:Class", - "schema:Text" - ], - "rdfs:comment": "A sub property of instrument. The query used on this action.", - "rdfs:label": "query", - "rdfs:subPropertyOf": "schema:instrument" - } - ] - }, - { - "@id": "schema:TradeAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of participating in an exchange of goods and services for monetary compensation. An agent trades an object, product or service with a participant in exchange for a one time or periodic payment.", - "rdfs:label": "TradeAction", - "rdfs:subClassOf": "schema:Action", - "children": [ - { - "@id": "schema:BuyAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of giving money to a seller in exchange for goods or services rendered. An agent buys an object, product, or service from a seller for a price. Reciprocal of SellAction.", - "rdfs:label": "BuyAction", - "rdfs:subClassOf": "schema:TradeAction", - "properties": [ - { - "@id": "schema:vendor", - "@type": "rdf:Property", - "domainIncludes": "schema:BuyAction", - "rangeIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "A sub property of participant. The seller. 
The participant/person/organization that sold the object.", - "rdfs:label": "vendor", - "rdfs:subPropertyOf": "schema:participant" - }, - { - "@id": "schema:warrantyPromise", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:BuyAction", - "schema:SellAction" - ], - "rangeIncludes": "schema:WarrantyPromise", - "rdfs:comment": "The warranty promise(s) included in the offer.", - "rdfs:label": "warrantyPromise" - } - ] - }, - { - "@id": "schema:DonateAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of providing goods, services, or money without compensation, often for philanthropic reasons.", - "rdfs:label": "DonateAction", - "rdfs:subClassOf": "schema:TradeAction", - "properties": [ - { - "@id": "schema:recipient", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CommunicateAction", - "schema:AuthorizeAction", - "schema:DonateAction", - "schema:GiveAction", - "schema:PayAction", - "schema:ReturnAction", - "schema:SendAction", - "schema:TipAction" - ], - "rangeIncludes": [ - "schema:Audience", - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "A sub property of participant. 
The participant who is at the receiving end of the action.", - "rdfs:label": "recipient", - "rdfs:subPropertyOf": "schema:participant" - } - ] - }, - { - "@id": "schema:OrderAction", - "@type": "rdfs:Class", - "rdfs:comment": "An agent orders an object/product/service to be delivered/sent.", - "rdfs:label": "OrderAction", - "rdfs:subClassOf": "schema:TradeAction" - }, - { - "@id": "schema:PayAction", - "@type": "rdfs:Class", - "rdfs:comment": "An agent pays a price to a participant.", - "rdfs:label": "PayAction", - "rdfs:subClassOf": "schema:TradeAction", - "properties": [ - { - "@id": "schema:purpose", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MedicalDevice", - "schema:AllocateAction", - "schema:PayAction" - ], - "rangeIncludes": [ - "schema:MedicalDevicePurpose", - "schema:Thing" - ], - "rdfs:comment": "A goal towards an action is taken. Can be concrete or abstract.", - "rdfs:label": "purpose" - }, - { - "@id": "schema:recipient", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CommunicateAction", - "schema:AuthorizeAction", - "schema:DonateAction", - "schema:GiveAction", - "schema:PayAction", - "schema:ReturnAction", - "schema:SendAction", - "schema:TipAction" - ], - "rangeIncludes": [ - "schema:Audience", - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "A sub property of participant. The participant who is at the receiving end of the action.", - "rdfs:label": "recipient", - "rdfs:subPropertyOf": "schema:participant" - } - ] - }, - { - "@id": "schema:QuoteAction", - "@type": "rdfs:Class", - "rdfs:comment": "An agent quotes/estimates/appraises an object/product/service with a price at a location/store.", - "rdfs:label": "QuoteAction", - "rdfs:subClassOf": "schema:TradeAction" - }, - { - "@id": "schema:RentAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of giving money in return for temporary use, but not ownership, of an object such as a vehicle or property. 
For example, an agent rents a property from a landlord in exchange for a periodic payment.", - "rdfs:label": "RentAction", - "rdfs:subClassOf": "schema:TradeAction", - "properties": [ - { - "@id": "schema:landlord", - "@type": "rdf:Property", - "domainIncludes": "schema:RentAction", - "rangeIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "A sub property of participant. The owner of the real estate property.", - "rdfs:label": "landlord", - "rdfs:subPropertyOf": "schema:participant" - }, - { - "@id": "schema:realEstateAgent", - "@type": "rdf:Property", - "domainIncludes": "schema:RentAction", - "rangeIncludes": "schema:RealEstateAgent", - "rdfs:comment": "A sub property of participant. The real estate agent involved in the action.", - "rdfs:label": "realEstateAgent", - "rdfs:subPropertyOf": "schema:participant" - } - ] - }, - { - "@id": "schema:SellAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of taking money from a buyer in exchange for goods or services rendered. An agent sells an object, product, or service to a buyer for a price. Reciprocal of BuyAction.", - "rdfs:label": "SellAction", - "rdfs:subClassOf": "schema:TradeAction", - "properties": [ - { - "@id": "schema:buyer", - "@type": "rdf:Property", - "domainIncludes": "schema:SellAction", - "rangeIncludes": "schema:Person", - "rdfs:comment": "A sub property of participant. 
The participant/person/organization that bought the object.", - "rdfs:label": "buyer", - "rdfs:subPropertyOf": "schema:participant" - }, - { - "@id": "schema:warrantyPromise", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:BuyAction", - "schema:SellAction" - ], - "rangeIncludes": "schema:WarrantyPromise", - "rdfs:comment": "The warranty promise(s) included in the offer.", - "rdfs:label": "warrantyPromise" - } - ] - }, - { - "@id": "schema:TipAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of giving money voluntarily to a beneficiary in recognition of services rendered.", - "rdfs:label": "TipAction", - "rdfs:subClassOf": "schema:TradeAction", - "properties": [ - { - "@id": "schema:recipient", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CommunicateAction", - "schema:AuthorizeAction", - "schema:DonateAction", - "schema:GiveAction", - "schema:PayAction", - "schema:ReturnAction", - "schema:SendAction", - "schema:TipAction" - ], - "rangeIncludes": [ - "schema:Audience", - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "A sub property of participant. 
The participant who is at the receiving end of the action.", - "rdfs:label": "recipient", - "rdfs:subPropertyOf": "schema:participant" - } - ] - } - ], - "properties": [ - { - "@id": "schema:price", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:PriceSpecification", - "schema:TradeAction" - ], - "rangeIncludes": [ - "schema:Number", - "schema:Text" - ], - "rdfs:comment": "The offer price of a product, or of a price component when attached to PriceSpecification and its subtypes.", - "rdfs:label": "price" - } - ] - }, - { - "@id": "schema:TransferAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of transferring/moving (abstract or concrete) animate or inanimate objects from one place to another.", - "rdfs:label": "TransferAction", - "rdfs:subClassOf": "schema:Action", - "children": [ - { - "@id": "schema:BorrowAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of obtaining an object under an agreement to return it at a later date. Reciprocal of LendAction.

Related actions:

", - "rdfs:label": "BorrowAction", - "rdfs:subClassOf": "schema:TransferAction", - "properties": [ - { - "@id": "schema:lender", - "@type": "rdf:Property", - "domainIncludes": "schema:BorrowAction", - "rangeIncludes": "schema:Person", - "rdfs:comment": "A sub property of participant. The person that lends the object being borrowed.", - "rdfs:label": "lender", - "rdfs:subPropertyOf": "schema:participant" - } - ] - }, - { - "@id": "schema:DownloadAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of downloading an object.", - "rdfs:label": "DownloadAction", - "rdfs:subClassOf": "schema:TransferAction" - }, - { - "@id": "schema:GiveAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of transferring ownership of an object to a destination. Reciprocal of TakeAction.

Related actions:

", - "rdfs:label": "GiveAction", - "rdfs:subClassOf": "schema:TransferAction", - "properties": [ - { - "@id": "schema:recipient", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CommunicateAction", - "schema:AuthorizeAction", - "schema:DonateAction", - "schema:GiveAction", - "schema:PayAction", - "schema:ReturnAction", - "schema:SendAction", - "schema:TipAction" - ], - "rangeIncludes": [ - "schema:Audience", - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "A sub property of participant. The participant who is at the receiving end of the action.", - "rdfs:label": "recipient", - "rdfs:subPropertyOf": "schema:participant" - } - ] - }, - { - "@id": "schema:LendAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of providing an object under an agreement that it will be returned at a later date. Reciprocal of BorrowAction.

Related actions:

", - "rdfs:label": "LendAction", - "rdfs:subClassOf": "schema:TransferAction", - "properties": [ - { - "@id": "schema:borrower", - "@type": "rdf:Property", - "domainIncludes": "schema:LendAction", - "rangeIncludes": "schema:Person", - "rdfs:comment": "A sub property of participant. The person that borrows the object being lent.", - "rdfs:label": "borrower", - "rdfs:subPropertyOf": "schema:participant" - } - ] - }, - { - "@id": "schema:ReceiveAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of physically/electronically taking delivery of an object thathas been transferred from an origin to a destination. Reciprocal of SendAction.

Related actions:

", - "rdfs:label": "ReceiveAction", - "rdfs:subClassOf": "schema:TransferAction", - "properties": [ - { - "@id": "schema:deliveryMethod", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:ReceiveAction", - "schema:SendAction", - "schema:TrackAction" - ], - "rangeIncludes": "schema:DeliveryMethod", - "rdfs:comment": "A sub property of instrument. The method of delivery", - "rdfs:label": "deliveryMethod", - "rdfs:subPropertyOf": "schema:instrument" - }, - { - "@id": "schema:sender", - "@type": "rdf:Property", - "domainIncludes": "schema:ReceiveAction", - "rangeIncludes": [ - "schema:Audience", - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "A sub property of participant. The participant who is at the sending end of the action.", - "rdfs:label": "sender", - "rdfs:subPropertyOf": "schema:participant" - } - ] - }, - { - "@id": "schema:ReturnAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of returning to the origin that which was previously received (concrete objects) or taken (ownership).", - "rdfs:label": "ReturnAction", - "rdfs:subClassOf": "schema:TransferAction", - "properties": [ - { - "@id": "schema:recipient", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CommunicateAction", - "schema:AuthorizeAction", - "schema:DonateAction", - "schema:GiveAction", - "schema:PayAction", - "schema:ReturnAction", - "schema:SendAction", - "schema:TipAction" - ], - "rangeIncludes": [ - "schema:Audience", - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "A sub property of participant. The participant who is at the receiving end of the action.", - "rdfs:label": "recipient", - "rdfs:subPropertyOf": "schema:participant" - } - ] - }, - { - "@id": "schema:SendAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of physically/electronically dispatching an object for transfer from an origin to a destination.

Related actions:

", - "rdfs:label": "SendAction", - "rdfs:subClassOf": "schema:TransferAction", - "properties": [ - { - "@id": "schema:deliveryMethod", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:ReceiveAction", - "schema:SendAction", - "schema:TrackAction" - ], - "rangeIncludes": "schema:DeliveryMethod", - "rdfs:comment": "A sub property of instrument. The method of delivery", - "rdfs:label": "deliveryMethod", - "rdfs:subPropertyOf": "schema:instrument" - }, - { - "@id": "schema:recipient", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CommunicateAction", - "schema:AuthorizeAction", - "schema:DonateAction", - "schema:GiveAction", - "schema:PayAction", - "schema:ReturnAction", - "schema:SendAction", - "schema:TipAction" - ], - "rangeIncludes": [ - "schema:Audience", - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "A sub property of participant. The participant who is at the receiving end of the action.", - "rdfs:label": "recipient", - "rdfs:subPropertyOf": "schema:participant" - } - ] - }, - { - "@id": "schema:TakeAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of gaining ownership of an object from an origin. Reciprocal of GiveAction.

Related actions:

", - "rdfs:label": "TakeAction", - "rdfs:subClassOf": "schema:TransferAction" - } - ], - "properties": [ - { - "@id": "schema:fromLocation", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MoveAction", - "schema:TransferAction", - "schema:ExerciseAction" - ], - "rangeIncludes": [ - "schema:Number", - "schema:Place" - ], - "rdfs:comment": "A sub property of location. The original location of the object or the agent before the action.", - "rdfs:label": "fromLocation", - "rdfs:subPropertyOf": "schema:location" - }, - { - "@id": "schema:toLocation", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:InsertAction", - "schema:MoveAction", - "schema:TransferAction", - "schema:ExerciseAction" - ], - "rangeIncludes": [ - "schema:Number", - "schema:Place" - ], - "rdfs:comment": "A sub property of location. The final location of the object or the agent after the action.", - "rdfs:label": "toLocation", - "rdfs:subPropertyOf": "schema:location" - } - ] - }, - { - "@id": "schema:UpdateAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of managing by changing/editing the state of the object.", - "rdfs:label": "UpdateAction", - "rdfs:subClassOf": "schema:Action", - "children": [ - { - "@id": "schema:AddAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of editing by adding an object to a collection.", - "rdfs:label": "AddAction", - "rdfs:subClassOf": "schema:UpdateAction", - "children": [ - { - "@id": "schema:InsertAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of adding at a specific location in an ordered collection.", - "rdfs:label": "InsertAction", - "rdfs:subClassOf": "schema:AddAction", - "children": [ - { - "@id": "schema:AppendAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of inserting at the end if an ordered collection.", - "rdfs:label": "AppendAction", - "rdfs:subClassOf": "schema:InsertAction" - }, - { - "@id": "schema:PrependAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of inserting at 
the beginning if an ordered collection.", - "rdfs:label": "PrependAction", - "rdfs:subClassOf": "schema:InsertAction" - } - ], - "properties": [ - { - "@id": "schema:toLocation", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:InsertAction", - "schema:MoveAction", - "schema:TransferAction", - "schema:ExerciseAction" - ], - "rangeIncludes": [ - "schema:Number", - "schema:Place" - ], - "rdfs:comment": "A sub property of location. The final location of the object or the agent after the action.", - "rdfs:label": "toLocation", - "rdfs:subPropertyOf": "schema:location" - } - ] - } - ] - }, - { - "@id": "schema:DeleteAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of editing a recipient by removing one of its objects.", - "rdfs:label": "DeleteAction", - "rdfs:subClassOf": "schema:UpdateAction" - }, - { - "@id": "schema:ReplaceAction", - "@type": "rdfs:Class", - "rdfs:comment": "The act of editing a recipient by replacing an old object with a new object.", - "rdfs:label": "ReplaceAction", - "rdfs:subClassOf": "schema:UpdateAction", - "properties": [ - { - "@id": "schema:replacee", - "@type": "rdf:Property", - "domainIncludes": "schema:ReplaceAction", - "rangeIncludes": "schema:Thing", - "rdfs:comment": "A sub property of object. The object that is being replaced.", - "rdfs:label": "replacee", - "rdfs:subPropertyOf": "schema:object" - }, - { - "@id": "schema:replacer", - "@type": "rdf:Property", - "domainIncludes": "schema:ReplaceAction", - "rangeIncludes": "schema:Thing", - "rdfs:comment": "A sub property of object. The object that replaces.", - "rdfs:label": "replacer", - "rdfs:subPropertyOf": "schema:object" - } - ] - } - ], - "properties": [ - { - "@id": "schema:collection", - "@type": "rdf:Property", - "domainIncludes": "schema:UpdateAction", - "rangeIncludes": "schema:Thing", - "rdfs:comment": "A sub property of object. 
The collection target of the action.", - "rdfs:label": "collection", - "rdfs:subPropertyOf": "schema:object" - } - ] - } - ], - "properties": [ - { - "@id": "schema:actionStatus", - "@type": "rdf:Property", - "domainIncludes": "schema:Action", - "rangeIncludes": "schema:ActionStatusType", - "rdfs:comment": "Indicates the current disposition of the Action.", - "rdfs:label": "actionStatus" - }, - { - "@id": "schema:agent", - "@type": "rdf:Property", - "domainIncludes": "schema:Action", - "rangeIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "The direct performer or driver of the action (animate or inanimate). e.g. *John* wrote a book.", - "rdfs:label": "agent" - }, - { - "@id": "schema:endTime", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Action", - "schema:FoodEstablishmentReservation" - ], - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The endTime of something. For a reserved event or service (e.g. FoodEstablishmentReservation), the time that it is expected to end. For actions that span a period of time, when the action was performed. e.g. John wrote a book from January to *December*.\n\nNote that Event uses startDate/endDate instead of startTime/endTime, even when describing dates with times. This situation may be clarified in future revisions.\n", - "rdfs:label": "endTime" - }, - { - "@id": "schema:instrument", - "@type": "rdf:Property", - "domainIncludes": "schema:Action", - "rangeIncludes": "schema:Thing", - "rdfs:comment": "The object that helped the agent perform the action. e.g. 
John wrote a book with *a pen*.", - "rdfs:label": "instrument" - }, - { - "@id": "schema:location", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Event", - "schema:Action" - ], - "rangeIncludes": [ - "schema:Place", - "schema:PostalAddress" - ], - "rdfs:comment": "The location of the event, organization or action.", - "rdfs:label": "location" - }, - { - "@id": "schema:object", - "@type": "rdf:Property", - "domainIncludes": "schema:Action", - "rangeIncludes": "schema:Thing", - "rdfs:comment": "The object upon the action is carried out, whose state is kept intact or changed. Also known as the semantic roles patient, affected or undergoer (which change their state) or theme (which doesn't). e.g. John read *a book*.", - "rdfs:label": "object" - }, - { - "@id": "schema:participant", - "@type": "rdf:Property", - "domainIncludes": "schema:Action", - "rangeIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "Other co-agents that participated in the action indirectly. e.g. John wrote a book with *Steve*.", - "rdfs:label": "participant" - }, - { - "@id": "schema:result", - "@type": "rdf:Property", - "domainIncludes": "schema:Action", - "rangeIncludes": "schema:Thing", - "rdfs:comment": "The result produced in the action. e.g. John wrote *a book*.", - "rdfs:label": "result" - }, - { - "@id": "schema:startTime", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Action", - "schema:FoodEstablishmentReservation" - ], - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The startTime of something. For a reserved event or service (e.g. FoodEstablishmentReservation), the time that it is expected to start. For actions that span a period of time, when the action was performed. e.g. John wrote a book from *January* to December.\n\nNote that Event uses startDate/endDate instead of startTime/endTime, even when describing dates with times. 
This situation may be clarified in future revisions.\n", - "rdfs:label": "startTime" - }, - { - "@id": "schema:target", - "@type": "rdf:Property", - "domainIncludes": "schema:Action", - "rangeIncludes": "schema:EntryPoint", - "rdfs:comment": "Indicates a target EntryPoint for an Action.", - "rdfs:label": "target" - } - ] - }, - { - "@id": "schema:BroadcastService", - "@type": "rdfs:Class", - "rdfs:comment": "A delivery service through which content is provided via broadcast over the air or online.", - "rdfs:label": "BroadcastService", - "rdfs:subClassOf": "schema:Thing", - "properties": [ - { - "@id": "schema:area", - "@type": "rdf:Property", - "domainIncludes": "schema:BroadcastService", - "rangeIncludes": "schema:Place", - "rdfs:comment": "The area within which users can expect to reach the broadcast service.", - "rdfs:label": "area" - }, - { - "@id": "schema:broadcaster", - "@type": "rdf:Property", - "domainIncludes": "schema:BroadcastService", - "rangeIncludes": "schema:Organization", - "rdfs:comment": "The organization owning or operating the broadcast service.", - "rdfs:label": "broadcaster" - }, - { - "@id": "schema:parentService", - "@type": "rdf:Property", - "domainIncludes": "schema:BroadcastService", - "rangeIncludes": "schema:BroadcastService", - "rdfs:comment": "A broadcast service to which the broadcast service may belong to such as regional variations of a national channel.", - "rdfs:label": "parentService" - } - ] - }, - { - "@id": "schema:CreativeWork", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_rNews" - }, - "rdfs:comment": "The most generic kind of creative work, including books, movies, photographs, software programs, etc.", - "rdfs:label": "CreativeWork", - "rdfs:subClassOf": "schema:Thing", - "children": [ - { - "@id": "schema:Answer", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": 
"http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_QAStackExchange" - }, - "rdfs:comment": "An answer offered to a question; perhaps correct, perhaps opinionated or wrong.", - "rdfs:label": "Answer", - "rdfs:subClassOf": "schema:CreativeWork", - "properties": [ - { - "@id": "schema:downvoteCount", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Question", - "schema:Answer", - "schema:Comment" - ], - "rangeIncludes": "schema:Integer", - "rdfs:comment": "The number of downvotes this question has received from the community.", - "rdfs:label": "downvoteCount" - }, - { - "@id": "schema:parentItem", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Answer", - "schema:Comment" - ], - "rangeIncludes": "schema:Question", - "rdfs:comment": "The parent of a question, answer or item in general.", - "rdfs:label": "parentItem" - }, - { - "@id": "schema:upvoteCount", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Question", - "schema:Answer", - "schema:Comment" - ], - "rangeIncludes": "schema:Integer", - "rdfs:comment": "The number of upvotes this question has received from the community.", - "rdfs:label": "upvoteCount" - } - ] - }, - { - "@id": "schema:Article", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_rNews" - }, - "rdfs:comment": "An article, such as a news article or piece of investigative report. 
Newspapers and magazines have articles of many different types and this is intended to cover them all.", - "rdfs:label": "Article", - "rdfs:subClassOf": "schema:CreativeWork", - "children": [ - { - "@id": "schema:BlogPosting", - "@type": "rdfs:Class", - "rdfs:comment": "A blog post.", - "rdfs:label": "BlogPosting", - "rdfs:subClassOf": "schema:Article" - }, - { - "@id": "schema:NewsArticle", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_rNews" - }, - "rdfs:comment": "A news article", - "rdfs:label": "NewsArticle", - "rdfs:subClassOf": "schema:Article", - "properties": [ - { - "@id": "schema:dateline", - "@type": "rdf:Property", - "domainIncludes": "schema:NewsArticle", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The location where the NewsArticle was produced.", - "rdfs:label": "dateline" - }, - { - "@id": "schema:printColumn", - "@type": "rdf:Property", - "domainIncludes": "schema:NewsArticle", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The number of the column in which the NewsArticle appears in the print edition.", - "rdfs:label": "printColumn" - }, - { - "@id": "schema:printEdition", - "@type": "rdf:Property", - "domainIncludes": "schema:NewsArticle", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The edition of the print product in which the NewsArticle appears.", - "rdfs:label": "printEdition" - }, - { - "@id": "schema:printPage", - "@type": "rdf:Property", - "domainIncludes": "schema:NewsArticle", - "rangeIncludes": "schema:Text", - "rdfs:comment": "If this NewsArticle appears in print, this field indicates the name of the page on which the article is found. Please note that this field is intended for the exact page name (e.g. 
A5, B18).", - "rdfs:label": "printPage" - }, - { - "@id": "schema:printSection", - "@type": "rdf:Property", - "domainIncludes": "schema:NewsArticle", - "rangeIncludes": "schema:Text", - "rdfs:comment": "If this NewsArticle appears in print, this field indicates the print section in which the article appeared.", - "rdfs:label": "printSection" - } - ] - }, - { - "@id": "schema:ScholarlyArticle", - "@type": "rdfs:Class", - "rdfs:comment": "A scholarly article.", - "rdfs:label": "ScholarlyArticle", - "rdfs:subClassOf": "schema:Article", - "children": [ - { - "@id": "schema:MedicalScholarlyArticle", - "@type": "rdfs:Class", - "rdfs:comment": "A scholarly article in the medical domain.", - "rdfs:label": "MedicalScholarlyArticle", - "rdfs:subClassOf": "schema:ScholarlyArticle", - "properties": [ - { - "@id": "schema:publicationType", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalScholarlyArticle", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The type of the medical article, taken from the US NLM MeSH publication type catalog.", - "rdfs:label": "publicationType" - } - ] - } - ] - }, - { - "@id": "schema:TechArticle", - "@type": "rdfs:Class", - "rdfs:comment": "A technical article - Example: How-to (task) topics, step-by-step, procedural troubleshooting, specifications, etc.", - "rdfs:label": "TechArticle", - "rdfs:subClassOf": "schema:Article", - "children": [ - { - "@id": "schema:APIReference", - "@type": "rdfs:Class", - "rdfs:comment": "Reference documentation for application programming interfaces (APIs).", - "rdfs:label": "APIReference", - "rdfs:subClassOf": "schema:TechArticle", - "properties": [ - { - "@id": "schema:assembly", - "@type": "rdf:Property", - "domainIncludes": "schema:APIReference", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Library file name e.g., mscorlib.dll, system.web.dll", - "rdfs:label": "assembly" - }, - { - "@id": "schema:assemblyVersion", - "@type": "rdf:Property", - "domainIncludes": "schema:APIReference", - 
"rangeIncludes": "schema:Text", - "rdfs:comment": "Associated product/technology version. e.g., .NET Framework 4.5", - "rdfs:label": "assemblyVersion" - }, - { - "@id": "schema:programmingModel", - "@type": "rdf:Property", - "domainIncludes": "schema:APIReference", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Indicates whether API is managed or unmanaged.", - "rdfs:label": "programmingModel" - }, - { - "@id": "schema:targetPlatform", - "@type": "rdf:Property", - "domainIncludes": "schema:APIReference", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Type of app development: phone, Metro style, desktop, XBox, etc.", - "rdfs:label": "targetPlatform" - } - ] - } - ], - "properties": [ - { - "@id": "schema:dependencies", - "@type": "rdf:Property", - "domainIncludes": "schema:TechArticle", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Prerequisites needed to fulfill steps in article.", - "rdfs:label": "dependencies" - }, - { - "@id": "schema:proficiencyLevel", - "@type": "rdf:Property", - "domainIncludes": "schema:TechArticle", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Proficiency needed for this content; expected values: 'Beginner', 'Expert'.", - "rdfs:label": "proficiencyLevel" - } - ] - } - ], - "properties": [ - { - "@id": "schema:articleBody", - "@type": "rdf:Property", - "domainIncludes": "schema:Article", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The actual body of the article.", - "rdfs:label": "articleBody" - }, - { - "@id": "schema:articleSection", - "@type": "rdf:Property", - "domainIncludes": "schema:Article", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Articles may belong to one or more 'sections' in a magazine or newspaper, such as Sports, Lifestyle, etc.", - "rdfs:label": "articleSection" - }, - { - "@id": "schema:wordCount", - "@type": "rdf:Property", - "domainIncludes": "schema:Article", - "rangeIncludes": "schema:Integer", - "rdfs:comment": "The number of words in the text of the Article.", - 
"rdfs:label": "wordCount" - } - ] - }, - { - "@id": "schema:Blog", - "@type": "rdfs:Class", - "rdfs:comment": "A blog", - "rdfs:label": "Blog", - "rdfs:subClassOf": "schema:CreativeWork", - "properties": [ - { - "@id": "schema:blogPost", - "@type": "rdf:Property", - "domainIncludes": "schema:Blog", - "rangeIncludes": "schema:BlogPosting", - "rdfs:comment": "A posting that is part of this blog.", - "rdfs:label": "blogPost" - }, - { - "@id": "schema:blogPosts", - "@type": "rdf:Property", - "domainIncludes": "schema:Blog", - "rangeIncludes": "schema:BlogPosting", - "rdfs:comment": "The postings that are part of this blog (legacy spelling; see singular form, blogPost).", - "rdfs:label": "blogPosts", - "supercededBy": "schema:blogPost" - } - ] - }, - { - "@id": "schema:Book", - "@type": "rdfs:Class", - "rdfs:comment": "A book.", - "rdfs:label": "Book", - "rdfs:subClassOf": "schema:CreativeWork", - "properties": [ - { - "@id": "schema:bookEdition", - "@type": "rdf:Property", - "domainIncludes": "schema:Book", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The edition of the book.", - "rdfs:label": "bookEdition" - }, - { - "@id": "schema:bookFormat", - "@type": "rdf:Property", - "domainIncludes": "schema:Book", - "rangeIncludes": "schema:BookFormatType", - "rdfs:comment": "The format of the book.", - "rdfs:label": "bookFormat" - }, - { - "@id": "schema:illustrator", - "@type": "rdf:Property", - "domainIncludes": "schema:Book", - "rangeIncludes": "schema:Person", - "rdfs:comment": "The illustrator of the book.", - "rdfs:label": "illustrator" - }, - { - "@id": "schema:isbn", - "@type": "rdf:Property", - "domainIncludes": "schema:Book", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The ISBN of the book.", - "rdfs:label": "isbn" - }, - { - "@id": "schema:numberOfPages", - "@type": "rdf:Property", - "domainIncludes": "schema:Book", - "rangeIncludes": "schema:Integer", - "rdfs:comment": "The number of pages in the book.", - "rdfs:label": "numberOfPages" - } - ] - 
}, - { - "@id": "schema:Clip", - "@type": "rdfs:Class", - "rdfs:comment": "A short TV or radio program or a segment/part of a program.", - "rdfs:label": "Clip", - "rdfs:subClassOf": "schema:CreativeWork", - "children": [ - { - "@id": "schema:RadioClip", - "@type": "rdfs:Class", - "rdfs:comment": "A short radio program or a segment/part of a radio program.", - "rdfs:label": "RadioClip", - "rdfs:subClassOf": "schema:Clip", - "properties": [ - { - "@id": "schema:partOfSeason", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Episode", - "schema:TVEpisode", - "schema:Clip", - "schema:RadioClip", - "schema:RadioEpisode", - "schema:TVClip" - ], - "rangeIncludes": "schema:Season", - "rdfs:comment": "The season to which this episode belongs.", - "rdfs:label": "partOfSeason" - }, - { - "@id": "schema:partOfSeries", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:TVSeason", - "schema:Clip", - "schema:RadioClip", - "schema:RadioEpisode", - "schema:RadioSeason", - "schema:TVClip" - ], - "rangeIncludes": "schema:Series", - "rdfs:comment": "The series to which this episode or season belongs.", - "rdfs:label": "partOfSeries" - } - ] - }, - { - "@id": "schema:TVClip", - "@type": "rdfs:Class", - "rdfs:comment": "A short TV program or a segment/part of a TV program.", - "rdfs:label": "TVClip", - "rdfs:subClassOf": "schema:Clip", - "properties": [ - { - "@id": "schema:partOfSeason", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Episode", - "schema:TVEpisode", - "schema:Clip", - "schema:RadioClip", - "schema:RadioEpisode", - "schema:TVClip" - ], - "rangeIncludes": "schema:Season", - "rdfs:comment": "The season to which this episode belongs.", - "rdfs:label": "partOfSeason" - }, - { - "@id": "schema:partOfSeries", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:TVSeason", - "schema:Clip", - "schema:RadioClip", - 
"schema:RadioEpisode", - "schema:RadioSeason", - "schema:TVClip" - ], - "rangeIncludes": "schema:Series", - "rdfs:comment": "The series to which this episode or season belongs.", - "rdfs:label": "partOfSeries" - }, - { - "@id": "schema:partOfTVSeries", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:TVEpisode", - "schema:TVSeason", - "schema:TVClip" - ], - "rangeIncludes": "schema:TVSeries", - "rdfs:comment": "The TV series to which this episode or season belongs. (legacy form; partOfSeries is preferred)", - "rdfs:label": "partOfTVSeries", - "supercededBy": "schema:partOfSeries" - } - ] - } - ], - "properties": [ - { - "@id": "schema:clipNumber", - "@type": "rdf:Property", - "domainIncludes": "schema:Clip", - "rangeIncludes": "schema:Integer", - "rdfs:comment": "Position of the clip within an ordered group of clips.", - "rdfs:label": "clipNumber" - }, - { - "@id": "schema:partOfEpisode", - "@type": "rdf:Property", - "domainIncludes": "schema:Clip", - "rangeIncludes": "schema:Episode", - "rdfs:comment": "The episode to which this clip belongs.", - "rdfs:label": "partOfEpisode" - }, - { - "@id": "schema:partOfSeason", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Episode", - "schema:TVEpisode", - "schema:Clip", - "schema:RadioClip", - "schema:RadioEpisode", - "schema:TVClip" - ], - "rangeIncludes": "schema:Season", - "rdfs:comment": "The season to which this episode belongs.", - "rdfs:label": "partOfSeason" - }, - { - "@id": "schema:partOfSeries", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:TVSeason", - "schema:Clip", - "schema:RadioClip", - "schema:RadioEpisode", - "schema:RadioSeason", - "schema:TVClip" - ], - "rangeIncludes": "schema:Series", - "rdfs:comment": "The series to which this episode or season belongs.", - "rdfs:label": "partOfSeries" - }, - { - "@id": "schema:position", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Episode", - 
"schema:Season", - "schema:Clip" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "Free text to define other than pure numerical ranking of an episode or a season in an ordered list of items (further formatting restrictions may apply within particular user groups).", - "rdfs:label": "position" - }, - { - "@id": "schema:publication", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MediaObject", - "schema:Episode", - "schema:Clip" - ], - "rangeIncludes": "schema:PublicationEvent", - "rdfs:comment": "A publication event associated with the episode, clip or media object.", - "rdfs:label": "publication" - } - ] - }, - { - "@id": "schema:Code", - "@type": "rdfs:Class", - "rdfs:comment": "Computer programming source code. Example: Full (compile ready) solutions, code snippet samples, scripts, templates.", - "rdfs:label": "Code", - "rdfs:subClassOf": "schema:CreativeWork", - "properties": [ - { - "@id": "schema:codeRepository", - "@type": "rdf:Property", - "domainIncludes": "schema:Code", - "rangeIncludes": "schema:URL", - "rdfs:comment": "Link to the repository where the un-compiled, human readable code and related code is located (SVN, github, CodePlex)", - "rdfs:label": "codeRepository" - }, - { - "@id": "schema:programmingLanguage", - "@type": "rdf:Property", - "domainIncludes": "schema:Code", - "rangeIncludes": "schema:Thing", - "rdfs:comment": "The computer programming language.", - "rdfs:label": "programmingLanguage" - }, - { - "@id": "schema:runtime", - "@type": "rdf:Property", - "domainIncludes": "schema:Code", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Runtime platform or script interpreter dependencies (Example - Java v1, Python2.3, .Net Framework 3.0)", - "rdfs:label": "runtime" - }, - { - "@id": "schema:sampleType", - "@type": "rdf:Property", - "domainIncludes": "schema:Code", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Full (compile ready) solution, code snippet, inline code, scripts, template.", - "rdfs:label": 
"sampleType" - }, - { - "@id": "schema:targetProduct", - "@type": "rdf:Property", - "domainIncludes": "schema:Code", - "rangeIncludes": "schema:SoftwareApplication", - "rdfs:comment": "Target Operating System / Product to which the code applies. If applies to several versions, just the product name can be used.", - "rdfs:label": "targetProduct" - } - ] - }, - { - "@id": "schema:Comment", - "@type": "rdfs:Class", - "rdfs:comment": "A comment on an item - for example, a comment on a blog post. The comment's content is expressed via the \"text\" property, and its topic via \"about\", properties shared with all CreativeWorks.", - "rdfs:label": "Comment", - "rdfs:subClassOf": "schema:CreativeWork", - "properties": [ - { - "@id": "schema:downvoteCount", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Question", - "schema:Answer", - "schema:Comment" - ], - "rangeIncludes": "schema:Integer", - "rdfs:comment": "The number of downvotes this question has received from the community.", - "rdfs:label": "downvoteCount" - }, - { - "@id": "schema:parentItem", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Answer", - "schema:Comment" - ], - "rangeIncludes": "schema:Question", - "rdfs:comment": "The parent of a question, answer or item in general.", - "rdfs:label": "parentItem" - }, - { - "@id": "schema:upvoteCount", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Question", - "schema:Answer", - "schema:Comment" - ], - "rangeIncludes": "schema:Integer", - "rdfs:comment": "The number of upvotes this question has received from the community.", - "rdfs:label": "upvoteCount" - } - ] - }, - { - "@id": "schema:DataCatalog", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_DatasetClass" - }, - "owl:equivalentClass": "dcat:DataCatalog", - "rdfs:comment": "A collection of datasets.", - "rdfs:label": "DataCatalog", - "rdfs:subClassOf": "schema:CreativeWork", - "properties": 
[ - { - "@id": "schema:dataset", - "@type": "rdf:Property", - "domainIncludes": "schema:DataCatalog", - "rangeIncludes": "schema:Dataset", - "rdfs:comment": "A dataset contained in a catalog.", - "rdfs:label": "dataset" - } - ] - }, - { - "@id": "schema:Dataset", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_DatasetClass" - }, - "owl:equivalentClass": [ - "dcat:Dataset", - "void:Dataset", - "dc:Dataset" - ], - "rdfs:comment": "A body of structured information describing some topic(s) of interest.", - "rdfs:label": "Dataset", - "rdfs:subClassOf": "schema:CreativeWork", - "properties": [ - { - "@id": "schema:catalog", - "@type": "rdf:Property", - "domainIncludes": "schema:Dataset", - "rangeIncludes": "schema:DataCatalog", - "rdfs:comment": "A data catalog which contains a dataset.", - "rdfs:label": "catalog" - }, - { - "@id": "schema:distribution", - "@type": "rdf:Property", - "domainIncludes": "schema:Dataset", - "rangeIncludes": "schema:DataDownload", - "rdfs:comment": "A downloadable form of this dataset, at a specific location, in a specific format.", - "rdfs:label": "distribution" - }, - { - "@id": "schema:spatial", - "@type": "rdf:Property", - "domainIncludes": "schema:Dataset", - "rangeIncludes": "schema:Place", - "rdfs:comment": "The range of spatial applicability of a dataset, e.g. for a dataset of New York weather, the state of New York.", - "rdfs:label": "spatial" - }, - { - "@id": "schema:temporal", - "@type": "rdf:Property", - "domainIncludes": "schema:Dataset", - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The range of temporal applicability of a dataset, e.g. 
for a 2011 census dataset, the year 2011 (in ISO 8601 time interval format).", - "rdfs:label": "temporal" - } - ] - }, - { - "@id": "schema:Diet", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A strategy of regulating the intake of food to achieve or maintain a specific health-related goal.", - "rdfs:label": "Diet", - "rdfs:subClassOf": [ - "schema:CreativeWork", - "schema:LifestyleModification" - ], - "properties": [ - { - "@id": "schema:dietFeatures", - "@type": "rdf:Property", - "domainIncludes": "schema:Diet", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Nutritional information specific to the dietary plan. May include dietary recommendations on what foods to avoid, what foods to consume, and specific alterations/deviations from the USDA or other regulatory body's approved dietary guidelines.", - "rdfs:label": "dietFeatures" - }, - { - "@id": "schema:endorsers", - "@type": "rdf:Property", - "domainIncludes": "schema:Diet", - "rangeIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "People or organizations that endorse the plan.", - "rdfs:label": "endorsers" - }, - { - "@id": "schema:expertConsiderations", - "@type": "rdf:Property", - "domainIncludes": "schema:Diet", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Medical expert advice related to the plan.", - "rdfs:label": "expertConsiderations" - }, - { - "@id": "schema:overview", - "@type": "rdf:Property", - "domainIncludes": "schema:Diet", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Descriptive information establishing the overarching theory/philosophy of the plan. 
May include the rationale for the name, the population where the plan first came to prominence, etc.", - "rdfs:label": "overview" - }, - { - "@id": "schema:physiologicalBenefits", - "@type": "rdf:Property", - "domainIncludes": "schema:Diet", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Specific physiologic benefits associated to the plan.", - "rdfs:label": "physiologicalBenefits" - }, - { - "@id": "schema:proprietaryName", - "@type": "rdf:Property", - "domainIncludes": "schema:Diet", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Proprietary name given to the diet plan, typically by its originator or creator.", - "rdfs:label": "proprietaryName" - }, - { - "@id": "schema:risks", - "@type": "rdf:Property", - "domainIncludes": "schema:Diet", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Specific physiologic risks associated to the plan.", - "rdfs:label": "risks" - } - ] - }, - { - "@id": "schema:EmailMessage", - "@type": "rdfs:Class", - "rdfs:comment": "An email message.", - "rdfs:label": "EmailMessage", - "rdfs:subClassOf": "schema:CreativeWork" - }, - { - "@id": "schema:Episode", - "@type": "rdfs:Class", - "rdfs:comment": "A TV or radio episode which can be part of a series or season.", - "rdfs:label": "Episode", - "rdfs:subClassOf": "schema:CreativeWork", - "children": [ - { - "@id": "schema:RadioEpisode", - "@type": "rdfs:Class", - "rdfs:comment": "A radio episode which can be part of a series or season.", - "rdfs:label": "RadioEpisode", - "rdfs:subClassOf": "schema:Episode", - "properties": [ - { - "@id": "schema:actor", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "A cast member of the movie, tv/radio series, season, episode, or video.", - "rdfs:label": "actor" - }, - { - "@id": "schema:actors", - "@type": "rdf:Property", - 
"domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "A cast member of the movie, tv/radio series, season, episode, or video. (legacy spelling; see singular form, actor)", - "rdfs:label": "actors", - "supercededBy": "schema:actor" - }, - { - "@id": "schema:director", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "The director of the movie, tv/radio episode or series.", - "rdfs:label": "director" - }, - { - "@id": "schema:directors", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "The director of the movie, tv/radio episode or series. 
(legacy spelling; see singular form, director)", - "rdfs:label": "directors", - "supercededBy": "schema:director" - }, - { - "@id": "schema:musicBy", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": [ - "schema:MusicGroup", - "schema:Person" - ], - "rdfs:comment": "The composer of the movie or TV/radio soundtrack.", - "rdfs:label": "musicBy" - }, - { - "@id": "schema:partOfSeason", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Episode", - "schema:TVEpisode", - "schema:Clip", - "schema:RadioClip", - "schema:RadioEpisode", - "schema:TVClip" - ], - "rangeIncludes": "schema:Season", - "rdfs:comment": "The season to which this episode belongs.", - "rdfs:label": "partOfSeason" - }, - { - "@id": "schema:partOfSeries", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:TVSeason", - "schema:Clip", - "schema:RadioClip", - "schema:RadioEpisode", - "schema:RadioSeason", - "schema:TVClip" - ], - "rangeIncludes": "schema:Series", - "rdfs:comment": "The series to which this episode or season belongs.", - "rdfs:label": "partOfSeries" - }, - { - "@id": "schema:producer", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "The producer of the movie, tv/radio series, season, or episode, or video.", - "rdfs:label": "producer" - }, - { - "@id": "schema:productionCompany", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MediaObject", - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:Series", - "schema:TVSeries", - "schema:VideoObject", - "schema:RadioEpisode", - 
"schema:RadioSeries" - ], - "rangeIncludes": "schema:Organization", - "rdfs:comment": "The production company or studio that made the movie, tv/radio series, season, or episode, or media object.", - "rdfs:label": "productionCompany" - }, - { - "@id": "schema:trailer", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:VideoObject", - "rdfs:comment": "The trailer of a movie or tv/radio series, season, or episode.", - "rdfs:label": "trailer" - } - ] - }, - { - "@id": "schema:TVEpisode", - "@type": "rdfs:Class", - "rdfs:comment": "A TV episode which can be part of a series or season.", - "rdfs:label": "TVEpisode", - "rdfs:subClassOf": "schema:Episode", - "properties": [ - { - "@id": "schema:actor", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "A cast member of the movie, tv/radio series, season, episode, or video.", - "rdfs:label": "actor" - }, - { - "@id": "schema:actors", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "A cast member of the movie, tv/radio series, season, episode, or video. 
(legacy spelling; see singular form, actor)", - "rdfs:label": "actors", - "supercededBy": "schema:actor" - }, - { - "@id": "schema:director", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "The director of the movie, tv/radio episode or series.", - "rdfs:label": "director" - }, - { - "@id": "schema:directors", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "The director of the movie, tv/radio episode or series. (legacy spelling; see singular form, director)", - "rdfs:label": "directors", - "supercededBy": "schema:director" - }, - { - "@id": "schema:musicBy", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": [ - "schema:MusicGroup", - "schema:Person" - ], - "rdfs:comment": "The composer of the movie or TV/radio soundtrack.", - "rdfs:label": "musicBy" - }, - { - "@id": "schema:partOfSeason", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Episode", - "schema:TVEpisode", - "schema:Clip", - "schema:RadioClip", - "schema:RadioEpisode", - "schema:TVClip" - ], - "rangeIncludes": "schema:Season", - "rdfs:comment": "The season to which this episode belongs.", - "rdfs:label": "partOfSeason" - }, - { - "@id": "schema:partOfSeries", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:TVSeason", - "schema:Clip", - "schema:RadioClip", - "schema:RadioEpisode", - "schema:RadioSeason", - "schema:TVClip" - ], - "rangeIncludes": 
"schema:Series", - "rdfs:comment": "The series to which this episode or season belongs.", - "rdfs:label": "partOfSeries" - }, - { - "@id": "schema:partOfTVSeries", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:TVEpisode", - "schema:TVSeason", - "schema:TVClip" - ], - "rangeIncludes": "schema:TVSeries", - "rdfs:comment": "The TV series to which this episode or season belongs. (legacy form; partOfSeries is preferred)", - "rdfs:label": "partOfTVSeries", - "supercededBy": "schema:partOfSeries" - }, - { - "@id": "schema:producer", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "The producer of the movie, tv/radio series, season, or episode, or video.", - "rdfs:label": "producer" - }, - { - "@id": "schema:productionCompany", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MediaObject", - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:Series", - "schema:TVSeries", - "schema:VideoObject", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Organization", - "rdfs:comment": "The production company or studio that made the movie, tv/radio series, season, or episode, or media object.", - "rdfs:label": "productionCompany" - }, - { - "@id": "schema:trailer", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:VideoObject", - "rdfs:comment": "The trailer of a movie or tv/radio series, season, or episode.", - "rdfs:label": "trailer" - } - ] - } - ], - "properties": [ - { - "@id": "schema:actor", - "@type": "rdf:Property", - 
"domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "A cast member of the movie, tv/radio series, season, episode, or video.", - "rdfs:label": "actor" - }, - { - "@id": "schema:actors", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "A cast member of the movie, tv/radio series, season, episode, or video. (legacy spelling; see singular form, actor)", - "rdfs:label": "actors", - "supercededBy": "schema:actor" - }, - { - "@id": "schema:director", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "The director of the movie, tv/radio episode or series.", - "rdfs:label": "director" - }, - { - "@id": "schema:directors", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "The director of the movie, tv/radio episode or series. 
(legacy spelling; see singular form, director)", - "rdfs:label": "directors", - "supercededBy": "schema:director" - }, - { - "@id": "schema:episodeNumber", - "@type": "rdf:Property", - "domainIncludes": "schema:Episode", - "rangeIncludes": "schema:Integer", - "rdfs:comment": "Position of the episode within an ordered group of episodes.", - "rdfs:label": "episodeNumber" - }, - { - "@id": "schema:musicBy", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": [ - "schema:MusicGroup", - "schema:Person" - ], - "rdfs:comment": "The composer of the movie or TV/radio soundtrack.", - "rdfs:label": "musicBy" - }, - { - "@id": "schema:partOfSeason", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Episode", - "schema:TVEpisode", - "schema:Clip", - "schema:RadioClip", - "schema:RadioEpisode", - "schema:TVClip" - ], - "rangeIncludes": "schema:Season", - "rdfs:comment": "The season to which this episode belongs.", - "rdfs:label": "partOfSeason" - }, - { - "@id": "schema:partOfSeries", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:TVSeason", - "schema:Clip", - "schema:RadioClip", - "schema:RadioEpisode", - "schema:RadioSeason", - "schema:TVClip" - ], - "rangeIncludes": "schema:Series", - "rdfs:comment": "The series to which this episode or season belongs.", - "rdfs:label": "partOfSeries" - }, - { - "@id": "schema:position", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Episode", - "schema:Season", - "schema:Clip" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "Free text to define other than pure numerical ranking of an episode or a season in an ordered list of items (further formatting restrictions may apply within particular user groups).", - "rdfs:label": "position" - }, - { - "@id": "schema:producer", - 
"@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "The producer of the movie, tv/radio series, season, or episode, or video.", - "rdfs:label": "producer" - }, - { - "@id": "schema:productionCompany", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MediaObject", - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:Series", - "schema:TVSeries", - "schema:VideoObject", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Organization", - "rdfs:comment": "The production company or studio that made the movie, tv/radio series, season, or episode, or media object.", - "rdfs:label": "productionCompany" - }, - { - "@id": "schema:publication", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MediaObject", - "schema:Episode", - "schema:Clip" - ], - "rangeIncludes": "schema:PublicationEvent", - "rdfs:comment": "A publication event associated with the episode, clip or media object.", - "rdfs:label": "publication" - }, - { - "@id": "schema:trailer", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:VideoObject", - "rdfs:comment": "The trailer of a movie or tv/radio series, season, or episode.", - "rdfs:label": "trailer" - } - ] - }, - { - "@id": "schema:ExercisePlan", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Fitness-related activity designed for a specific health-related purpose, including defined exercise routines 
as well as activity prescribed by a clinician.", - "rdfs:label": "ExercisePlan", - "rdfs:subClassOf": [ - "schema:CreativeWork", - "schema:PhysicalActivity" - ], - "properties": [ - { - "@id": "schema:activityDuration", - "@type": "rdf:Property", - "domainIncludes": "schema:ExercisePlan", - "rangeIncludes": "schema:Duration", - "rdfs:comment": "Length of time to engage in the activity.", - "rdfs:label": "activityDuration" - }, - { - "@id": "schema:activityFrequency", - "@type": "rdf:Property", - "domainIncludes": "schema:ExercisePlan", - "rangeIncludes": "schema:Text", - "rdfs:comment": "How often one should engage in the activity.", - "rdfs:label": "activityFrequency" - }, - { - "@id": "schema:additionalVariable", - "@type": "rdf:Property", - "domainIncludes": "schema:ExercisePlan", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Any additional component of the exercise prescription that may need to be articulated to the patient. This may include the order of exercises, the number of repetitions of movement, quantitative distance, progressions over time, etc.", - "rdfs:label": "additionalVariable" - }, - { - "@id": "schema:exerciseType", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:ExercisePlan", - "schema:ExerciseAction" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "Type(s) of exercise or activity, such as strength training, flexibility training, aerobics, cardiac rehabilitation, etc.", - "rdfs:label": "exerciseType" - }, - { - "@id": "schema:intensity", - "@type": "rdf:Property", - "domainIncludes": "schema:ExercisePlan", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Quantitative measure gauging the degree of force involved in the exercise, for example, heartbeats per minute. 
May include the velocity of the movement.", - "rdfs:label": "intensity" - }, - { - "@id": "schema:repetitions", - "@type": "rdf:Property", - "domainIncludes": "schema:ExercisePlan", - "rangeIncludes": "schema:Number", - "rdfs:comment": "Number of times one should repeat the activity.", - "rdfs:label": "repetitions" - }, - { - "@id": "schema:restPeriods", - "@type": "rdf:Property", - "domainIncludes": "schema:ExercisePlan", - "rangeIncludes": "schema:Text", - "rdfs:comment": "How often one should break from the activity.", - "rdfs:label": "restPeriods" - }, - { - "@id": "schema:workload", - "@type": "rdf:Property", - "domainIncludes": "schema:ExercisePlan", - "rangeIncludes": "schema:Energy", - "rdfs:comment": "Quantitative measure of the physiologic output of the exercise; also referred to as energy expenditure.", - "rdfs:label": "workload" - } - ] - }, - { - "@id": "schema:ItemList", - "@type": "rdfs:Class", - "rdfs:comment": "A list of items of any sort—for example, Top 10 Movies About Weathermen, or Top 100 Party Songs. Not to be confused with HTML lists, which are often used only for formatting.", - "rdfs:label": "ItemList", - "rdfs:subClassOf": "schema:CreativeWork", - "properties": [ - { - "@id": "schema:itemListElement", - "@type": "rdf:Property", - "domainIncludes": "schema:ItemList", - "rangeIncludes": "schema:Text", - "rdfs:comment": "A single list item.", - "rdfs:label": "itemListElement" - }, - { - "@id": "schema:itemListOrder", - "@type": "rdf:Property", - "domainIncludes": "schema:ItemList", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Type of ordering (e.g. Ascending, Descending, Unordered).", - "rdfs:label": "itemListOrder" - } - ] - }, - { - "@id": "schema:Map", - "@type": "rdfs:Class", - "rdfs:comment": "A map.", - "rdfs:label": "Map", - "rdfs:subClassOf": "schema:CreativeWork" - }, - { - "@id": "schema:MediaObject", - "@type": "rdfs:Class", - "rdfs:comment": "An image, video, or audio object embedded in a web page. 
Note that a creative work may have many media objects associated with it on the same web page. For example, a page about a single song (MusicRecording) may have a music video (VideoObject), and a high and low bandwidth audio stream (2 AudioObject's).", - "rdfs:label": "MediaObject", - "rdfs:subClassOf": "schema:CreativeWork", - "children": [ - { - "@id": "schema:AudioObject", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_rNews" - }, - "rdfs:comment": "An audio file.", - "rdfs:label": "AudioObject", - "rdfs:subClassOf": "schema:MediaObject", - "properties": [ - { - "@id": "schema:transcript", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:AudioObject", - "schema:VideoObject" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "If this MediaObject is an AudioObject or VideoObject, the transcript of that object.", - "rdfs:label": "transcript" - } - ] - }, - { - "@id": "schema:DataDownload", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_DatasetClass" - }, - "owl:equivalentClass": "dcat:Distribution", - "rdfs:comment": "A dataset in downloadable form.", - "rdfs:label": "DataDownload", - "rdfs:subClassOf": "schema:MediaObject" - }, - { - "@id": "schema:ImageObject", - "@type": "rdfs:Class", - "owl:equivalentClass": "dc:Image", - "rdfs:comment": "An image file.", - "rdfs:label": "ImageObject", - "rdfs:subClassOf": "schema:MediaObject", - "properties": [ - { - "@id": "schema:caption", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:ImageObject", - "schema:VideoObject" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The caption for this object.", - "rdfs:label": "caption" - }, - { - "@id": "schema:exifData", - "@type": "rdf:Property", - "domainIncludes": "schema:ImageObject", - "rangeIncludes": "schema:Text", - "rdfs:comment": "exif data for this object.", - 
"rdfs:label": "exifData" - }, - { - "@id": "schema:representativeOfPage", - "@type": "rdf:Property", - "domainIncludes": "schema:ImageObject", - "rangeIncludes": "schema:Boolean", - "rdfs:comment": "Indicates whether this image is representative of the content of the page.", - "rdfs:label": "representativeOfPage" - }, - { - "@id": "schema:thumbnail", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:ImageObject", - "schema:VideoObject" - ], - "rangeIncludes": "schema:ImageObject", - "rdfs:comment": "Thumbnail image for an image or video.", - "rdfs:label": "thumbnail" - } - ] - }, - { - "@id": "schema:MusicVideoObject", - "@type": "rdfs:Class", - "rdfs:comment": "A music video file.", - "rdfs:label": "MusicVideoObject", - "rdfs:subClassOf": "schema:MediaObject" - }, - { - "@id": "schema:VideoObject", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_rNews" - }, - "rdfs:comment": "A video file.", - "rdfs:label": "VideoObject", - "rdfs:subClassOf": "schema:MediaObject", - "properties": [ - { - "@id": "schema:caption", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:ImageObject", - "schema:VideoObject" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The caption for this object.", - "rdfs:label": "caption" - }, - { - "@id": "schema:productionCompany", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MediaObject", - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:Series", - "schema:TVSeries", - "schema:VideoObject", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Organization", - "rdfs:comment": "The production company or studio that made the movie, tv/radio series, season, or episode, or media object.", - "rdfs:label": "productionCompany" - }, - { - "@id": "schema:thumbnail", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:ImageObject", - "schema:VideoObject" - ], - 
"rangeIncludes": "schema:ImageObject", - "rdfs:comment": "Thumbnail image for an image or video.", - "rdfs:label": "thumbnail" - }, - { - "@id": "schema:transcript", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:AudioObject", - "schema:VideoObject" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "If this MediaObject is an AudioObject or VideoObject, the transcript of that object.", - "rdfs:label": "transcript" - }, - { - "@id": "schema:videoFrameSize", - "@type": "rdf:Property", - "domainIncludes": "schema:VideoObject", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The frame size of the video.", - "rdfs:label": "videoFrameSize" - }, - { - "@id": "schema:videoQuality", - "@type": "rdf:Property", - "domainIncludes": "schema:VideoObject", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The quality of the video.", - "rdfs:label": "videoQuality" - } - ] - } - ], - "properties": [ - { - "@id": "schema:associatedArticle", - "@type": "rdf:Property", - "domainIncludes": "schema:MediaObject", - "rangeIncludes": "schema:NewsArticle", - "rdfs:comment": "A NewsArticle associated with the Media Object.", - "rdfs:label": "associatedArticle" - }, - { - "@id": "schema:bitrate", - "@type": "rdf:Property", - "domainIncludes": "schema:MediaObject", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The bitrate of the media object.", - "rdfs:label": "bitrate" - }, - { - "@id": "schema:contentSize", - "@type": "rdf:Property", - "domainIncludes": "schema:MediaObject", - "rangeIncludes": "schema:Text", - "rdfs:comment": "File size in (mega/kilo) bytes.", - "rdfs:label": "contentSize" - }, - { - "@id": "schema:contentUrl", - "@type": "rdf:Property", - "domainIncludes": "schema:MediaObject", - "rangeIncludes": "schema:URL", - "rdfs:comment": "Actual bytes of the media object, for example the image file or video file. 
(previous spelling: contentURL)", - "rdfs:label": "contentUrl" - }, - { - "@id": "schema:duration", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MediaObject", - "schema:Event", - "schema:Movie", - "schema:MusicRecording" - ], - "rangeIncludes": "schema:Duration", - "rdfs:comment": "The duration of the item (movie, audio recording, event, etc.) in ISO 8601 date format.", - "rdfs:label": "duration" - }, - { - "@id": "schema:embedUrl", - "@type": "rdf:Property", - "domainIncludes": "schema:MediaObject", - "rangeIncludes": "schema:URL", - "rdfs:comment": "A URL pointing to a player for a specific video. In general, this is the information in the src element of an embed tag and should not be the same as the content of the loc tag. (previous spelling: embedURL)", - "rdfs:label": "embedUrl" - }, - { - "@id": "schema:encodesCreativeWork", - "@type": "rdf:Property", - "domainIncludes": "schema:MediaObject", - "rangeIncludes": "schema:CreativeWork", - "rdfs:comment": "The creative work encoded by this media object", - "rdfs:label": "encodesCreativeWork" - }, - { - "@id": "schema:encodingFormat", - "@type": "rdf:Property", - "domainIncludes": "schema:MediaObject", - "rangeIncludes": "schema:Text", - "rdfs:comment": "mp3, mpeg4, etc.", - "rdfs:label": "encodingFormat" - }, - { - "@id": "schema:expires", - "@type": "rdf:Property", - "domainIncludes": "schema:MediaObject", - "rangeIncludes": "schema:Date", - "rdfs:comment": "Date the content expires and is no longer useful or available. 
Useful for videos.", - "rdfs:label": "expires" - }, - { - "@id": "schema:height", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MediaObject", - "schema:Product" - ], - "rangeIncludes": [ - "schema:Distance", - "schema:QuantitativeValue" - ], - "rdfs:comment": "The height of the item.", - "rdfs:label": "height" - }, - { - "@id": "schema:interactionCount", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Organization", - "schema:Place", - "schema:MediaObject", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "A count of a specific user interactions with this item—for example, 20 UserLikes, 5 UserComments, or 300 UserDownloads. The user interaction type should be one of the sub types of UserInteraction.", - "rdfs:label": "interactionCount" - }, - { - "@id": "schema:offers", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:MediaObject", - "schema:Event", - "schema:Product" - ], - "rangeIncludes": "schema:Offer", - "rdfs:comment": "An offer to provide this item—for example, an offer to sell a product, rent the DVD of a movie, or give away tickets to an event.", - "rdfs:label": "offers" - }, - { - "@id": "schema:playerType", - "@type": "rdf:Property", - "domainIncludes": "schema:MediaObject", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Player type required—for example, Flash or Silverlight.", - "rdfs:label": "playerType" - }, - { - "@id": "schema:productionCompany", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MediaObject", - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:Series", - "schema:TVSeries", - "schema:VideoObject", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Organization", - "rdfs:comment": "The production company or studio that made the movie, tv/radio series, season, or episode, or media object.", - "rdfs:label": "productionCompany" - }, - { - "@id": 
"schema:publication", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MediaObject", - "schema:Episode", - "schema:Clip" - ], - "rangeIncludes": "schema:PublicationEvent", - "rdfs:comment": "A publication event associated with the episode, clip or media object.", - "rdfs:label": "publication" - }, - { - "@id": "schema:regionsAllowed", - "@type": "rdf:Property", - "domainIncludes": "schema:MediaObject", - "rangeIncludes": "schema:Place", - "rdfs:comment": "The regions where the media is allowed. If not specified, then it's assumed to be allowed everywhere. Specify the countries in ISO 3166 format.", - "rdfs:label": "regionsAllowed" - }, - { - "@id": "schema:requiresSubscription", - "@type": "rdf:Property", - "domainIncludes": "schema:MediaObject", - "rangeIncludes": "schema:Boolean", - "rdfs:comment": "Indicates if use of the media require a subscription (either paid or free). Allowed values are true or false (note that an earlier version had 'yes', 'no').", - "rdfs:label": "requiresSubscription" - }, - { - "@id": "schema:uploadDate", - "@type": "rdf:Property", - "domainIncludes": "schema:MediaObject", - "rangeIncludes": "schema:Date", - "rdfs:comment": "Date when this media object was uploaded to this site.", - "rdfs:label": "uploadDate" - }, - { - "@id": "schema:width", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MediaObject", - "schema:Product" - ], - "rangeIncludes": [ - "schema:Distance", - "schema:QuantitativeValue" - ], - "rdfs:comment": "The width of the item.", - "rdfs:label": "width" - } - ] - }, - { - "@id": "schema:Movie", - "@type": "rdfs:Class", - "rdfs:comment": "A movie.", - "rdfs:label": "Movie", - "rdfs:subClassOf": "schema:CreativeWork", - "properties": [ - { - "@id": "schema:actor", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - 
"rdfs:comment": "A cast member of the movie, tv/radio series, season, episode, or video.", - "rdfs:label": "actor" - }, - { - "@id": "schema:actors", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "A cast member of the movie, tv/radio series, season, episode, or video. (legacy spelling; see singular form, actor)", - "rdfs:label": "actors", - "supercededBy": "schema:actor" - }, - { - "@id": "schema:director", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "The director of the movie, tv/radio episode or series.", - "rdfs:label": "director" - }, - { - "@id": "schema:directors", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "The director of the movie, tv/radio episode or series. (legacy spelling; see singular form, director)", - "rdfs:label": "directors", - "supercededBy": "schema:director" - }, - { - "@id": "schema:duration", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MediaObject", - "schema:Event", - "schema:Movie", - "schema:MusicRecording" - ], - "rangeIncludes": "schema:Duration", - "rdfs:comment": "The duration of the item (movie, audio recording, event, etc.) 
in ISO 8601 date format.", - "rdfs:label": "duration" - }, - { - "@id": "schema:musicBy", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": [ - "schema:MusicGroup", - "schema:Person" - ], - "rdfs:comment": "The composer of the movie or TV/radio soundtrack.", - "rdfs:label": "musicBy" - }, - { - "@id": "schema:producer", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "The producer of the movie, tv/radio series, season, or episode, or video.", - "rdfs:label": "producer" - }, - { - "@id": "schema:productionCompany", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MediaObject", - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:Series", - "schema:TVSeries", - "schema:VideoObject", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Organization", - "rdfs:comment": "The production company or studio that made the movie, tv/radio series, season, or episode, or media object.", - "rdfs:label": "productionCompany" - }, - { - "@id": "schema:trailer", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:VideoObject", - "rdfs:comment": "The trailer of a movie or tv/radio series, season, or episode.", - "rdfs:label": "trailer" - } - ] - }, - { - "@id": "schema:MusicPlaylist", - "@type": "rdfs:Class", - "rdfs:comment": "A collection of music tracks in playlist form.", - 
"rdfs:label": "MusicPlaylist", - "rdfs:subClassOf": "schema:CreativeWork", - "children": [ - { - "@id": "schema:MusicAlbum", - "@type": "rdfs:Class", - "rdfs:comment": "A collection of music tracks.", - "rdfs:label": "MusicAlbum", - "rdfs:subClassOf": "schema:MusicPlaylist", - "properties": [ - { - "@id": "schema:byArtist", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MusicAlbum", - "schema:MusicRecording" - ], - "rangeIncludes": "schema:MusicGroup", - "rdfs:comment": "The artist that performed this album or recording.", - "rdfs:label": "byArtist" - } - ] - } - ], - "properties": [ - { - "@id": "schema:numTracks", - "@type": "rdf:Property", - "domainIncludes": "schema:MusicPlaylist", - "rangeIncludes": "schema:Integer", - "rdfs:comment": "The number of tracks in this album or playlist.", - "rdfs:label": "numTracks" - }, - { - "@id": "schema:track", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MusicPlaylist", - "schema:MusicGroup" - ], - "rangeIncludes": "schema:MusicRecording", - "rdfs:comment": "A music recording (track)—usually a single song.", - "rdfs:label": "track" - }, - { - "@id": "schema:tracks", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MusicPlaylist", - "schema:MusicGroup" - ], - "rangeIncludes": "schema:MusicRecording", - "rdfs:comment": "A music recording (track)—usually a single song (legacy spelling; see singular form, track).", - "rdfs:label": "tracks", - "supercededBy": "schema:track" - } - ] - }, - { - "@id": "schema:MusicRecording", - "@type": "rdfs:Class", - "rdfs:comment": "A music recording (track), usually a single song.", - "rdfs:label": "MusicRecording", - "rdfs:subClassOf": "schema:CreativeWork", - "properties": [ - { - "@id": "schema:byArtist", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MusicAlbum", - "schema:MusicRecording" - ], - "rangeIncludes": "schema:MusicGroup", - "rdfs:comment": "The artist that performed this album or recording.", - "rdfs:label": "byArtist" - }, - { - 
"@id": "schema:duration", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MediaObject", - "schema:Event", - "schema:Movie", - "schema:MusicRecording" - ], - "rangeIncludes": "schema:Duration", - "rdfs:comment": "The duration of the item (movie, audio recording, event, etc.) in ISO 8601 date format.", - "rdfs:label": "duration" - }, - { - "@id": "schema:inAlbum", - "@type": "rdf:Property", - "domainIncludes": "schema:MusicRecording", - "rangeIncludes": "schema:MusicAlbum", - "rdfs:comment": "The album to which this recording belongs.", - "rdfs:label": "inAlbum" - }, - { - "@id": "schema:inPlaylist", - "@type": "rdf:Property", - "domainIncludes": "schema:MusicRecording", - "rangeIncludes": "schema:MusicPlaylist", - "rdfs:comment": "The playlist to which this recording belongs.", - "rdfs:label": "inPlaylist" - } - ] - }, - { - "@id": "schema:Painting", - "@type": "rdfs:Class", - "rdfs:comment": "A painting.", - "rdfs:label": "Painting", - "rdfs:subClassOf": "schema:CreativeWork" - }, - { - "@id": "schema:Photograph", - "@type": "rdfs:Class", - "rdfs:comment": "A photograph.", - "rdfs:label": "Photograph", - "rdfs:subClassOf": "schema:CreativeWork" - }, - { - "@id": "schema:Question", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_QAStackExchange" - }, - "rdfs:comment": "A specific question - e.g. from a user seeking answers online, or collected in a Frequently Asked Questions (FAQ) document.", - "rdfs:label": "Question", - "rdfs:subClassOf": "schema:CreativeWork", - "properties": [ - { - "@id": "schema:acceptedAnswer", - "@type": "rdf:Property", - "domainIncludes": "schema:Question", - "rangeIncludes": "schema:Answer", - "rdfs:comment": "The answer that has been accepted as best, typically on a Question/Answer site. Sites vary in their selection mechanisms, e.g. 
drawing on community opinion and/or the view of the Question author.", - "rdfs:label": "acceptedAnswer", - "rdfs:subPropertyOf": "schema:suggestedAnswer" - }, - { - "@id": "schema:answerCount", - "@type": "rdf:Property", - "domainIncludes": "schema:Question", - "rangeIncludes": "schema:Integer", - "rdfs:comment": "The number of answers this question has received.", - "rdfs:label": "answerCount" - }, - { - "@id": "schema:downvoteCount", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Question", - "schema:Answer", - "schema:Comment" - ], - "rangeIncludes": "schema:Integer", - "rdfs:comment": "The number of downvotes this question has received from the community.", - "rdfs:label": "downvoteCount" - }, - { - "@id": "schema:suggestedAnswer", - "@type": "rdf:Property", - "domainIncludes": "schema:Question", - "rangeIncludes": "schema:Answer", - "rdfs:comment": "An answer (possibly one of several, possibly incorrect) to a Question, e.g. on a Question/Answer site.", - "rdfs:label": "suggestedAnswer" - }, - { - "@id": "schema:upvoteCount", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Question", - "schema:Answer", - "schema:Comment" - ], - "rangeIncludes": "schema:Integer", - "rdfs:comment": "The number of upvotes this question has received from the community.", - "rdfs:label": "upvoteCount" - } - ] - }, - { - "@id": "schema:Recipe", - "@type": "rdfs:Class", - "rdfs:comment": "A recipe.", - "rdfs:label": "Recipe", - "rdfs:subClassOf": "schema:CreativeWork", - "properties": [ - { - "@id": "schema:cookTime", - "@type": "rdf:Property", - "domainIncludes": "schema:Recipe", - "rangeIncludes": "schema:Duration", - "rdfs:comment": "The time it takes to actually cook the dish, in ISO 8601 duration format.", - "rdfs:label": "cookTime" - }, - { - "@id": "schema:cookingMethod", - "@type": "rdf:Property", - "domainIncludes": "schema:Recipe", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The method of cooking, such as Frying, Steaming, ...", - 
"rdfs:label": "cookingMethod" - }, - { - "@id": "schema:ingredients", - "@type": "rdf:Property", - "domainIncludes": "schema:Recipe", - "rangeIncludes": "schema:Text", - "rdfs:comment": "An ingredient used in the recipe.", - "rdfs:label": "ingredients" - }, - { - "@id": "schema:nutrition", - "@type": "rdf:Property", - "domainIncludes": "schema:Recipe", - "rangeIncludes": "schema:NutritionInformation", - "rdfs:comment": "Nutrition information about the recipe.", - "rdfs:label": "nutrition" - }, - { - "@id": "schema:prepTime", - "@type": "rdf:Property", - "domainIncludes": "schema:Recipe", - "rangeIncludes": "schema:Duration", - "rdfs:comment": "The length of time it takes to prepare the recipe, in ISO 8601 duration format.", - "rdfs:label": "prepTime" - }, - { - "@id": "schema:recipeCategory", - "@type": "rdf:Property", - "domainIncludes": "schema:Recipe", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The category of the recipe—for example, appetizer, entree, etc.", - "rdfs:label": "recipeCategory" - }, - { - "@id": "schema:recipeCuisine", - "@type": "rdf:Property", - "domainIncludes": "schema:Recipe", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The cuisine of the recipe (for example, French or Ethiopian).", - "rdfs:label": "recipeCuisine" - }, - { - "@id": "schema:recipeInstructions", - "@type": "rdf:Property", - "domainIncludes": "schema:Recipe", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The steps to make the dish.", - "rdfs:label": "recipeInstructions" - }, - { - "@id": "schema:recipeYield", - "@type": "rdf:Property", - "domainIncludes": "schema:Recipe", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The quantity produced by the recipe (for example, number of people served, number of servings, etc).", - "rdfs:label": "recipeYield" - }, - { - "@id": "schema:totalTime", - "@type": "rdf:Property", - "domainIncludes": "schema:Recipe", - "rangeIncludes": "schema:Duration", - "rdfs:comment": "The total time it takes to prepare and cook 
the recipe, in ISO 8601 duration format.", - "rdfs:label": "totalTime" - } - ] - }, - { - "@id": "schema:Review", - "@type": "rdfs:Class", - "rdfs:comment": "A review of an item - for example, a restaurant, movie, or store.", - "rdfs:label": "Review", - "rdfs:subClassOf": "schema:CreativeWork", - "properties": [ - { - "@id": "schema:itemReviewed", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:AggregateRating", - "schema:Review" - ], - "rangeIncludes": "schema:Thing", - "rdfs:comment": "The item that is being reviewed/rated.", - "rdfs:label": "itemReviewed" - }, - { - "@id": "schema:reviewBody", - "@type": "rdf:Property", - "domainIncludes": "schema:Review", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The actual body of the review.", - "rdfs:label": "reviewBody" - }, - { - "@id": "schema:reviewRating", - "@type": "rdf:Property", - "domainIncludes": "schema:Review", - "rangeIncludes": "schema:Rating", - "rdfs:comment": "The rating given in this review. Note that reviews can themselves be rated. The reviewRating applies to rating given by the review. 
The aggregateRating property applies to the review itself, as a creative work.", - "rdfs:label": "reviewRating" - } - ] - }, - { - "@id": "schema:Sculpture", - "@type": "rdfs:Class", - "rdfs:comment": "A piece of sculpture.", - "rdfs:label": "Sculpture", - "rdfs:subClassOf": "schema:CreativeWork" - }, - { - "@id": "schema:Season", - "@type": "rdfs:Class", - "rdfs:comment": "A TV or radio season.", - "rdfs:label": "Season", - "rdfs:subClassOf": "schema:CreativeWork", - "children": [ - { - "@id": "schema:RadioSeason", - "@type": "rdfs:Class", - "rdfs:comment": "Season dedicated to radio broadcast and associated online delivery.", - "rdfs:label": "RadioSeason", - "rdfs:subClassOf": "schema:Season", - "properties": [ - { - "@id": "schema:episode", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Episode", - "rdfs:comment": "An episode of a TV/radio series or season", - "rdfs:label": "episode" - }, - { - "@id": "schema:episodes", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Episode", - "rdfs:comment": "An episode of a TV/radio series or season (legacy spelling; see singular form, episode)", - "rdfs:label": "episodes", - "supercededBy": "schema:episode" - }, - { - "@id": "schema:numberOfEpisodes", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Number", - "rdfs:comment": "The number of episodes in this season or series.", - "rdfs:label": "numberOfEpisodes" - }, - { - "@id": "schema:partOfSeries", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Episode", - "schema:TVEpisode", - 
"schema:Season", - "schema:TVSeason", - "schema:Clip", - "schema:RadioClip", - "schema:RadioEpisode", - "schema:RadioSeason", - "schema:TVClip" - ], - "rangeIncludes": "schema:Series", - "rdfs:comment": "The series to which this episode or season belongs.", - "rdfs:label": "partOfSeries" - }, - { - "@id": "schema:trailer", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:VideoObject", - "rdfs:comment": "The trailer of a movie or tv/radio series, season, or episode.", - "rdfs:label": "trailer" - } - ] - }, - { - "@id": "schema:TVSeason", - "@type": "rdfs:Class", - "rdfs:comment": "Season dedicated to TV broadcast and associated online delivery.", - "rdfs:label": "TVSeason", - "rdfs:subClassOf": [ - "schema:CreativeWork", - "schema:Season" - ], - "properties": [ - { - "@id": "schema:endDate", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Role", - "schema:Event", - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries" - ], - "rangeIncludes": "schema:Date", - "rdfs:comment": "The end date and time of the role, event or item (in ISO 8601 date format).", - "rdfs:label": "endDate" - }, - { - "@id": "schema:episode", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Episode", - "rdfs:comment": "An episode of a TV/radio series or season", - "rdfs:label": "episode" - }, - { - "@id": "schema:episodes", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Episode", - "rdfs:comment": "An episode 
of a TV/radio series or season (legacy spelling; see singular form, episode)", - "rdfs:label": "episodes", - "supercededBy": "schema:episode" - }, - { - "@id": "schema:numberOfEpisodes", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Number", - "rdfs:comment": "The number of episodes in this season or series.", - "rdfs:label": "numberOfEpisodes" - }, - { - "@id": "schema:partOfSeries", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:TVSeason", - "schema:Clip", - "schema:RadioClip", - "schema:RadioEpisode", - "schema:RadioSeason", - "schema:TVClip" - ], - "rangeIncludes": "schema:Series", - "rdfs:comment": "The series to which this episode or season belongs.", - "rdfs:label": "partOfSeries" - }, - { - "@id": "schema:partOfTVSeries", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:TVEpisode", - "schema:TVSeason", - "schema:TVClip" - ], - "rangeIncludes": "schema:TVSeries", - "rdfs:comment": "The TV series to which this episode or season belongs. 
(legacy form; partOfSeries is preferred)", - "rdfs:label": "partOfTVSeries", - "supercededBy": "schema:partOfSeries" - }, - { - "@id": "schema:seasonNumber", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Season", - "schema:TVSeason" - ], - "rangeIncludes": "schema:Integer", - "rdfs:comment": "Position of the season within an ordered group of seasons.", - "rdfs:label": "seasonNumber" - }, - { - "@id": "schema:startDate", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Role", - "schema:Event", - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries" - ], - "rangeIncludes": "schema:Date", - "rdfs:comment": "The start date and time of the event, role or item (in ISO 8601 date format).", - "rdfs:label": "startDate" - }, - { - "@id": "schema:trailer", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:VideoObject", - "rdfs:comment": "The trailer of a movie or tv/radio series, season, or episode.", - "rdfs:label": "trailer" - } - ] - } - ], - "properties": [ - { - "@id": "schema:endDate", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Role", - "schema:Event", - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries" - ], - "rangeIncludes": "schema:Date", - "rdfs:comment": "The end date and time of the role, event or item (in ISO 8601 date format).", - "rdfs:label": "endDate" - }, - { - "@id": "schema:episode", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Episode", - "rdfs:comment": "An episode of a TV/radio series or season", - "rdfs:label": "episode" - }, - { - "@id": "schema:episodes", - 
"@type": "rdf:Property", - "domainIncludes": [ - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Episode", - "rdfs:comment": "An episode of a TV/radio series or season (legacy spelling; see singular form, episode)", - "rdfs:label": "episodes", - "supercededBy": "schema:episode" - }, - { - "@id": "schema:numberOfEpisodes", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Number", - "rdfs:comment": "The number of episodes in this season or series.", - "rdfs:label": "numberOfEpisodes" - }, - { - "@id": "schema:partOfSeries", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:TVSeason", - "schema:Clip", - "schema:RadioClip", - "schema:RadioEpisode", - "schema:RadioSeason", - "schema:TVClip" - ], - "rangeIncludes": "schema:Series", - "rdfs:comment": "The series to which this episode or season belongs.", - "rdfs:label": "partOfSeries" - }, - { - "@id": "schema:position", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Episode", - "schema:Season", - "schema:Clip" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "Free text to define other than pure numerical ranking of an episode or a season in an ordered list of items (further formatting restrictions may apply within particular user groups).", - "rdfs:label": "position" - }, - { - "@id": "schema:producer", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "The producer of the movie, tv/radio series, season, or episode, or video.", - "rdfs:label": 
"producer" - }, - { - "@id": "schema:productionCompany", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MediaObject", - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:Series", - "schema:TVSeries", - "schema:VideoObject", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Organization", - "rdfs:comment": "The production company or studio that made the movie, tv/radio series, season, or episode, or media object.", - "rdfs:label": "productionCompany" - }, - { - "@id": "schema:seasonNumber", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Season", - "schema:TVSeason" - ], - "rangeIncludes": "schema:Integer", - "rdfs:comment": "Position of the season within an ordered group of seasons.", - "rdfs:label": "seasonNumber" - }, - { - "@id": "schema:startDate", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Role", - "schema:Event", - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries" - ], - "rangeIncludes": "schema:Date", - "rdfs:comment": "The start date and time of the event, role or item (in ISO 8601 date format).", - "rdfs:label": "startDate" - }, - { - "@id": "schema:trailer", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:VideoObject", - "rdfs:comment": "The trailer of a movie or tv/radio series, season, or episode.", - "rdfs:label": "trailer" - } - ] - }, - { - "@id": "schema:Series", - "@type": "rdfs:Class", - "rdfs:comment": "A TV or radio series.", - "rdfs:label": "Series", - "rdfs:subClassOf": "schema:CreativeWork", - "children": [ - { - "@id": "schema:RadioSeries", - "@type": "rdfs:Class", - "rdfs:comment": "Series dedicated to radio broadcast and associated online delivery.", - "rdfs:label": 
"RadioSeries", - "rdfs:subClassOf": "schema:Series", - "properties": [ - { - "@id": "schema:actor", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "A cast member of the movie, tv/radio series, season, episode, or video.", - "rdfs:label": "actor" - }, - { - "@id": "schema:actors", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "A cast member of the movie, tv/radio series, season, episode, or video. (legacy spelling; see singular form, actor)", - "rdfs:label": "actors", - "supercededBy": "schema:actor" - }, - { - "@id": "schema:director", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "The director of the movie, tv/radio episode or series.", - "rdfs:label": "director" - }, - { - "@id": "schema:directors", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "The director of the movie, tv/radio episode or series. 
(legacy spelling; see singular form, director)", - "rdfs:label": "directors", - "supercededBy": "schema:director" - }, - { - "@id": "schema:episode", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Episode", - "rdfs:comment": "An episode of a TV/radio series or season", - "rdfs:label": "episode" - }, - { - "@id": "schema:episodes", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Episode", - "rdfs:comment": "An episode of a TV/radio series or season (legacy spelling; see singular form, episode)", - "rdfs:label": "episodes", - "supercededBy": "schema:episode" - }, - { - "@id": "schema:musicBy", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": [ - "schema:MusicGroup", - "schema:Person" - ], - "rdfs:comment": "The composer of the movie or TV/radio soundtrack.", - "rdfs:label": "musicBy" - }, - { - "@id": "schema:numberOfEpisodes", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Number", - "rdfs:comment": "The number of episodes in this season or series.", - "rdfs:label": "numberOfEpisodes" - }, - { - "@id": "schema:producer", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "The producer of the movie, 
tv/radio series, season, or episode, or video.", - "rdfs:label": "producer" - }, - { - "@id": "schema:productionCompany", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MediaObject", - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:Series", - "schema:TVSeries", - "schema:VideoObject", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Organization", - "rdfs:comment": "The production company or studio that made the movie, tv/radio series, season, or episode, or media object.", - "rdfs:label": "productionCompany" - }, - { - "@id": "schema:season", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Series", - "schema:TVSeries", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Season", - "rdfs:comment": "A season in a tv/radio series.", - "rdfs:label": "season" - }, - { - "@id": "schema:seasons", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Series", - "schema:TVSeries", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Season", - "rdfs:comment": "A season in a tv/radio series. 
(legacy spelling; see singular form, season)", - "rdfs:label": "seasons", - "supercededBy": "schema:season" - }, - { - "@id": "schema:trailer", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:VideoObject", - "rdfs:comment": "The trailer of a movie or tv/radio series, season, or episode.", - "rdfs:label": "trailer" - } - ] - }, - { - "@id": "schema:TVSeries", - "@type": "rdfs:Class", - "rdfs:comment": "Series dedicated to TV broadcast and associated online delivery.", - "rdfs:label": "TVSeries", - "rdfs:subClassOf": [ - "schema:CreativeWork", - "schema:Series" - ], - "properties": [ - { - "@id": "schema:actor", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "A cast member of the movie, tv/radio series, season, episode, or video.", - "rdfs:label": "actor" - }, - { - "@id": "schema:actors", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "A cast member of the movie, tv/radio series, season, episode, or video. 
(legacy spelling; see singular form, actor)", - "rdfs:label": "actors", - "supercededBy": "schema:actor" - }, - { - "@id": "schema:director", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "The director of the movie, tv/radio episode or series.", - "rdfs:label": "director" - }, - { - "@id": "schema:directors", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "The director of the movie, tv/radio episode or series. (legacy spelling; see singular form, director)", - "rdfs:label": "directors", - "supercededBy": "schema:director" - }, - { - "@id": "schema:endDate", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Role", - "schema:Event", - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries" - ], - "rangeIncludes": "schema:Date", - "rdfs:comment": "The end date and time of the role, event or item (in ISO 8601 date format).", - "rdfs:label": "endDate" - }, - { - "@id": "schema:episode", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Episode", - "rdfs:comment": "An episode of a TV/radio series or season", - "rdfs:label": "episode" - }, - { - "@id": "schema:episodes", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Episode", - "rdfs:comment": "An episode of a TV/radio series or season (legacy spelling; see singular 
form, episode)", - "rdfs:label": "episodes", - "supercededBy": "schema:episode" - }, - { - "@id": "schema:musicBy", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": [ - "schema:MusicGroup", - "schema:Person" - ], - "rdfs:comment": "The composer of the movie or TV/radio soundtrack.", - "rdfs:label": "musicBy" - }, - { - "@id": "schema:numberOfEpisodes", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Number", - "rdfs:comment": "The number of episodes in this season or series.", - "rdfs:label": "numberOfEpisodes" - }, - { - "@id": "schema:producer", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "The producer of the movie, tv/radio series, season, or episode, or video.", - "rdfs:label": "producer" - }, - { - "@id": "schema:productionCompany", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MediaObject", - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:Series", - "schema:TVSeries", - "schema:VideoObject", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Organization", - "rdfs:comment": "The production company or studio that made the movie, tv/radio series, season, or episode, or media object.", - "rdfs:label": "productionCompany" - }, - { - "@id": "schema:season", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Series", - "schema:TVSeries", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Season", - "rdfs:comment": "A 
season in a tv/radio series.", - "rdfs:label": "season" - }, - { - "@id": "schema:seasons", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Series", - "schema:TVSeries", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Season", - "rdfs:comment": "A season in a tv/radio series. (legacy spelling; see singular form, season)", - "rdfs:label": "seasons", - "supercededBy": "schema:season" - }, - { - "@id": "schema:startDate", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Role", - "schema:Event", - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries" - ], - "rangeIncludes": "schema:Date", - "rdfs:comment": "The start date and time of the event, role or item (in ISO 8601 date format).", - "rdfs:label": "startDate" - }, - { - "@id": "schema:trailer", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:VideoObject", - "rdfs:comment": "The trailer of a movie or tv/radio series, season, or episode.", - "rdfs:label": "trailer" - } - ] - } - ], - "properties": [ - { - "@id": "schema:actor", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "A cast member of the movie, tv/radio series, season, episode, or video.", - "rdfs:label": "actor" - }, - { - "@id": "schema:actors", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "A cast member of the movie, tv/radio series, season, episode, or 
video. (legacy spelling; see singular form, actor)", - "rdfs:label": "actors", - "supercededBy": "schema:actor" - }, - { - "@id": "schema:director", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "The director of the movie, tv/radio episode or series.", - "rdfs:label": "director" - }, - { - "@id": "schema:directors", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "The director of the movie, tv/radio episode or series. (legacy spelling; see singular form, director)", - "rdfs:label": "directors", - "supercededBy": "schema:director" - }, - { - "@id": "schema:endDate", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Role", - "schema:Event", - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries" - ], - "rangeIncludes": "schema:Date", - "rdfs:comment": "The end date and time of the role, event or item (in ISO 8601 date format).", - "rdfs:label": "endDate" - }, - { - "@id": "schema:episode", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Episode", - "rdfs:comment": "An episode of a TV/radio series or season", - "rdfs:label": "episode" - }, - { - "@id": "schema:episodes", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Episode", - "rdfs:comment": "An episode of a TV/radio series or season (legacy spelling; see 
singular form, episode)", - "rdfs:label": "episodes", - "supercededBy": "schema:episode" - }, - { - "@id": "schema:musicBy", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": [ - "schema:MusicGroup", - "schema:Person" - ], - "rdfs:comment": "The composer of the movie or TV/radio soundtrack.", - "rdfs:label": "musicBy" - }, - { - "@id": "schema:numberOfEpisodes", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Number", - "rdfs:comment": "The number of episodes in this season or series.", - "rdfs:label": "numberOfEpisodes" - }, - { - "@id": "schema:numberOfSeasons", - "@type": "rdf:Property", - "domainIncludes": "schema:Series", - "rangeIncludes": "schema:Number", - "rdfs:comment": "The number of seasons in this series.", - "rdfs:label": "numberOfSeasons" - }, - { - "@id": "schema:producer", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "The producer of the movie, tv/radio series, season, or episode, or video.", - "rdfs:label": "producer" - }, - { - "@id": "schema:productionCompany", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MediaObject", - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:Series", - "schema:TVSeries", - "schema:VideoObject", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Organization", - "rdfs:comment": "The production company or studio that made the movie, tv/radio series, season, or episode, or media object.", - 
"rdfs:label": "productionCompany" - }, - { - "@id": "schema:season", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Series", - "schema:TVSeries", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Season", - "rdfs:comment": "A season in a tv/radio series.", - "rdfs:label": "season" - }, - { - "@id": "schema:seasons", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Series", - "schema:TVSeries", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Season", - "rdfs:comment": "A season in a tv/radio series. (legacy spelling; see singular form, season)", - "rdfs:label": "seasons", - "supercededBy": "schema:season" - }, - { - "@id": "schema:startDate", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Role", - "schema:Event", - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries" - ], - "rangeIncludes": "schema:Date", - "rdfs:comment": "The start date and time of the event, role or item (in ISO 8601 date format).", - "rdfs:label": "startDate" - }, - { - "@id": "schema:trailer", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:VideoObject", - "rdfs:comment": "The trailer of a movie or tv/radio series, season, or episode.", - "rdfs:label": "trailer" - } - ] - }, - { - "@id": "schema:SoftwareApplication", - "@type": "rdfs:Class", - "rdfs:comment": "A software application.", - "rdfs:label": "SoftwareApplication", - "rdfs:subClassOf": "schema:CreativeWork", - "children": [ - { - "@id": "schema:MobileApplication", - "@type": "rdfs:Class", - "rdfs:comment": "A mobile software application.", - "rdfs:label": "MobileApplication", - "rdfs:subClassOf": "schema:SoftwareApplication", - "properties": [ - { - "@id": "schema:carrierRequirements", - "@type": "rdf:Property", - 
"domainIncludes": "schema:MobileApplication", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Specifies specific carrier(s) requirements for the application (e.g. an application may only work on a specific carrier network).", - "rdfs:label": "carrierRequirements" - } - ] - }, - { - "@id": "schema:WebApplication", - "@type": "rdfs:Class", - "rdfs:comment": "Web applications.", - "rdfs:label": "WebApplication", - "rdfs:subClassOf": "schema:SoftwareApplication", - "properties": [ - { - "@id": "schema:browserRequirements", - "@type": "rdf:Property", - "domainIncludes": "schema:WebApplication", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Specifies browser requirements in human-readable text. For example,\"requires HTML5 support\".", - "rdfs:label": "browserRequirements" - } - ] - } - ], - "properties": [ - { - "@id": "schema:applicationCategory", - "@type": "rdf:Property", - "domainIncludes": "schema:SoftwareApplication", - "rangeIncludes": [ - "schema:Text", - "schema:URL" - ], - "rdfs:comment": "Type of software application, e.g. \"Game, Multimedia\".", - "rdfs:label": "applicationCategory" - }, - { - "@id": "schema:applicationSubCategory", - "@type": "rdf:Property", - "domainIncludes": "schema:SoftwareApplication", - "rangeIncludes": [ - "schema:Text", - "schema:URL" - ], - "rdfs:comment": "Subcategory of the application, e.g. \"Arcade Game\".", - "rdfs:label": "applicationSubCategory" - }, - { - "@id": "schema:applicationSuite", - "@type": "rdf:Property", - "domainIncludes": "schema:SoftwareApplication", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The name of the application suite to which the application belongs (e.g. Excel belongs to Office)", - "rdfs:label": "applicationSuite" - }, - { - "@id": "schema:countriesNotSupported", - "@type": "rdf:Property", - "domainIncludes": "schema:SoftwareApplication", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Countries for which the application is not supported. 
You can also provide the two-letter ISO 3166-1 alpha-2 country code.", - "rdfs:label": "countriesNotSupported" - }, - { - "@id": "schema:countriesSupported", - "@type": "rdf:Property", - "domainIncludes": "schema:SoftwareApplication", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Countries for which the application is supported. You can also provide the two-letter ISO 3166-1 alpha-2 country code.", - "rdfs:label": "countriesSupported" - }, - { - "@id": "schema:device", - "@type": "rdf:Property", - "domainIncludes": "schema:SoftwareApplication", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Device required to run the application. Used in cases where a specific make/model is required to run the application.", - "rdfs:label": "device" - }, - { - "@id": "schema:downloadUrl", - "@type": "rdf:Property", - "domainIncludes": "schema:SoftwareApplication", - "rangeIncludes": "schema:URL", - "rdfs:comment": "If the file can be downloaded, URL to download the binary.", - "rdfs:label": "downloadUrl" - }, - { - "@id": "schema:featureList", - "@type": "rdf:Property", - "domainIncludes": "schema:SoftwareApplication", - "rangeIncludes": [ - "schema:Text", - "schema:URL" - ], - "rdfs:comment": "Features or modules provided by this application (and possibly required by other applications).", - "rdfs:label": "featureList" - }, - { - "@id": "schema:fileFormat", - "@type": "rdf:Property", - "domainIncludes": "schema:SoftwareApplication", - "rangeIncludes": "schema:Text", - "rdfs:comment": "MIME format of the binary (e.g. application/zip).", - "rdfs:label": "fileFormat" - }, - { - "@id": "schema:fileSize", - "@type": "rdf:Property", - "domainIncludes": "schema:SoftwareApplication", - "rangeIncludes": "schema:Integer", - "rdfs:comment": "Size of the application / package (e.g. 18MB). 
In the absence of a unit (MB, KB etc.), KB will be assumed.", - "rdfs:label": "fileSize" - }, - { - "@id": "schema:installUrl", - "@type": "rdf:Property", - "domainIncludes": "schema:SoftwareApplication", - "rangeIncludes": "schema:URL", - "rdfs:comment": "URL at which the app may be installed, if different from the URL of the item.", - "rdfs:label": "installUrl" - }, - { - "@id": "schema:memoryRequirements", - "@type": "rdf:Property", - "domainIncludes": "schema:SoftwareApplication", - "rangeIncludes": [ - "schema:Text", - "schema:URL" - ], - "rdfs:comment": "Minimum memory requirements.", - "rdfs:label": "memoryRequirements" - }, - { - "@id": "schema:operatingSystem", - "@type": "rdf:Property", - "domainIncludes": "schema:SoftwareApplication", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Operating systems supported (Windows 7, OSX 10.6, Android 1.6).", - "rdfs:label": "operatingSystem" - }, - { - "@id": "schema:permissions", - "@type": "rdf:Property", - "domainIncludes": "schema:SoftwareApplication", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Permission(s) required to run the app (for example, a mobile app may require full internet access or may run only on wifi).", - "rdfs:label": "permissions" - }, - { - "@id": "schema:processorRequirements", - "@type": "rdf:Property", - "domainIncludes": "schema:SoftwareApplication", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Processor architecture required to run the application (e.g. 
IA64).", - "rdfs:label": "processorRequirements" - }, - { - "@id": "schema:releaseNotes", - "@type": "rdf:Property", - "domainIncludes": "schema:SoftwareApplication", - "rangeIncludes": [ - "schema:Text", - "schema:URL" - ], - "rdfs:comment": "Description of what changed in this version.", - "rdfs:label": "releaseNotes" - }, - { - "@id": "schema:requirements", - "@type": "rdf:Property", - "domainIncludes": "schema:SoftwareApplication", - "rangeIncludes": [ - "schema:Text", - "schema:URL" - ], - "rdfs:comment": "Component dependency requirements for application. This includes runtime environments and shared libraries that are not included in the application distribution package, but required to run the application (Examples: DirectX, Java or .NET runtime).", - "rdfs:label": "requirements" - }, - { - "@id": "schema:screenshot", - "@type": "rdf:Property", - "domainIncludes": "schema:SoftwareApplication", - "rangeIncludes": [ - "schema:ImageObject", - "schema:URL" - ], - "rdfs:comment": "A link to a screenshot image of the app.", - "rdfs:label": "screenshot" - }, - { - "@id": "schema:softwareVersion", - "@type": "rdf:Property", - "domainIncludes": "schema:SoftwareApplication", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Version of the software instance.", - "rdfs:label": "softwareVersion" - }, - { - "@id": "schema:storageRequirements", - "@type": "rdf:Property", - "domainIncludes": "schema:SoftwareApplication", - "rangeIncludes": [ - "schema:Text", - "schema:URL" - ], - "rdfs:comment": "Storage requirements (free space required).", - "rdfs:label": "storageRequirements" - } - ] - }, - { - "@id": "schema:TVSeason", - "@type": "rdfs:Class", - "rdfs:comment": "Season dedicated to TV broadcast and associated online delivery.", - "rdfs:label": "TVSeason", - "rdfs:subClassOf": [ - "schema:CreativeWork", - "schema:Season" - ], - "properties": [ - { - "@id": "schema:endDate", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Role", - "schema:Event", - 
"schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries" - ], - "rangeIncludes": "schema:Date", - "rdfs:comment": "The end date and time of the role, event or item (in ISO 8601 date format).", - "rdfs:label": "endDate" - }, - { - "@id": "schema:episode", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Episode", - "rdfs:comment": "An episode of a TV/radio series or season", - "rdfs:label": "episode" - }, - { - "@id": "schema:episodes", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Episode", - "rdfs:comment": "An episode of a TV/radio series or season (legacy spelling; see singular form, episode)", - "rdfs:label": "episodes", - "supercededBy": "schema:episode" - }, - { - "@id": "schema:numberOfEpisodes", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Number", - "rdfs:comment": "The number of episodes in this season or series.", - "rdfs:label": "numberOfEpisodes" - }, - { - "@id": "schema:partOfSeries", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:TVSeason", - "schema:Clip", - "schema:RadioClip", - "schema:RadioEpisode", - "schema:RadioSeason", - "schema:TVClip" - ], - "rangeIncludes": "schema:Series", - "rdfs:comment": "The series to which this episode or season belongs.", - "rdfs:label": "partOfSeries" - }, - { - "@id": "schema:partOfTVSeries", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:TVEpisode", - "schema:TVSeason", - "schema:TVClip" - ], - "rangeIncludes": "schema:TVSeries", - 
"rdfs:comment": "The TV series to which this episode or season belongs. (legacy form; partOfSeries is preferred)", - "rdfs:label": "partOfTVSeries", - "supercededBy": "schema:partOfSeries" - }, - { - "@id": "schema:seasonNumber", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Season", - "schema:TVSeason" - ], - "rangeIncludes": "schema:Integer", - "rdfs:comment": "Position of the season within an ordered group of seasons.", - "rdfs:label": "seasonNumber" - }, - { - "@id": "schema:startDate", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Role", - "schema:Event", - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries" - ], - "rangeIncludes": "schema:Date", - "rdfs:comment": "The start date and time of the event, role or item (in ISO 8601 date format).", - "rdfs:label": "startDate" - }, - { - "@id": "schema:trailer", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:VideoObject", - "rdfs:comment": "The trailer of a movie or tv/radio series, season, or episode.", - "rdfs:label": "trailer" - } - ] - }, - { - "@id": "schema:TVSeries", - "@type": "rdfs:Class", - "rdfs:comment": "Series dedicated to TV broadcast and associated online delivery.", - "rdfs:label": "TVSeries", - "rdfs:subClassOf": [ - "schema:CreativeWork", - "schema:Series" - ], - "properties": [ - { - "@id": "schema:actor", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "A cast member of the movie, tv/radio series, season, episode, or video.", - "rdfs:label": "actor" - }, - { - "@id": "schema:actors", - "@type": 
"rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "A cast member of the movie, tv/radio series, season, episode, or video. (legacy spelling; see singular form, actor)", - "rdfs:label": "actors", - "supercededBy": "schema:actor" - }, - { - "@id": "schema:director", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "The director of the movie, tv/radio episode or series.", - "rdfs:label": "director" - }, - { - "@id": "schema:directors", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "The director of the movie, tv/radio episode or series. 
(legacy spelling; see singular form, director)", - "rdfs:label": "directors", - "supercededBy": "schema:director" - }, - { - "@id": "schema:endDate", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Role", - "schema:Event", - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries" - ], - "rangeIncludes": "schema:Date", - "rdfs:comment": "The end date and time of the role, event or item (in ISO 8601 date format).", - "rdfs:label": "endDate" - }, - { - "@id": "schema:episode", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Episode", - "rdfs:comment": "An episode of a TV/radio series or season", - "rdfs:label": "episode" - }, - { - "@id": "schema:episodes", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Episode", - "rdfs:comment": "An episode of a TV/radio series or season (legacy spelling; see singular form, episode)", - "rdfs:label": "episodes", - "supercededBy": "schema:episode" - }, - { - "@id": "schema:musicBy", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": [ - "schema:MusicGroup", - "schema:Person" - ], - "rdfs:comment": "The composer of the movie or TV/radio soundtrack.", - "rdfs:label": "musicBy" - }, - { - "@id": "schema:numberOfEpisodes", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Number", - "rdfs:comment": "The number of episodes in this season or series.", - "rdfs:label": 
"numberOfEpisodes" - }, - { - "@id": "schema:producer", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Person", - "rdfs:comment": "The producer of the movie, tv/radio series, season, or episode, or video.", - "rdfs:label": "producer" - }, - { - "@id": "schema:productionCompany", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MediaObject", - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:Series", - "schema:TVSeries", - "schema:VideoObject", - "schema:RadioEpisode", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Organization", - "rdfs:comment": "The production company or studio that made the movie, tv/radio series, season, or episode, or media object.", - "rdfs:label": "productionCompany" - }, - { - "@id": "schema:season", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Series", - "schema:TVSeries", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Season", - "rdfs:comment": "A season in a tv/radio series.", - "rdfs:label": "season" - }, - { - "@id": "schema:seasons", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Series", - "schema:TVSeries", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:Season", - "rdfs:comment": "A season in a tv/radio series. 
(legacy spelling; see singular form, season)", - "rdfs:label": "seasons", - "supercededBy": "schema:season" - }, - { - "@id": "schema:startDate", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Role", - "schema:Event", - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries" - ], - "rangeIncludes": "schema:Date", - "rdfs:comment": "The start date and time of the event, role or item (in ISO 8601 date format).", - "rdfs:label": "startDate" - }, - { - "@id": "schema:trailer", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Movie", - "schema:Episode", - "schema:TVEpisode", - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries", - "schema:RadioEpisode", - "schema:RadioSeason", - "schema:RadioSeries" - ], - "rangeIncludes": "schema:VideoObject", - "rdfs:comment": "The trailer of a movie or tv/radio series, season, or episode.", - "rdfs:label": "trailer" - } - ] - }, - { - "@id": "schema:WebPage", - "@type": "rdfs:Class", - "rdfs:comment": "A web page. Every web page is implicitly assumed to be declared to be of type WebPage, so the various properties about that webpage, such as breadcrumb may be used. 
We recommend explicit declaration if these properties are specified, but if they are found outside of an itemscope, they will be assumed to be about the page", - "rdfs:label": "WebPage", - "rdfs:subClassOf": "schema:CreativeWork", - "children": [ - { - "@id": "schema:AboutPage", - "@type": "rdfs:Class", - "rdfs:comment": "Web page type: About page.", - "rdfs:label": "AboutPage", - "rdfs:subClassOf": "schema:WebPage" - }, - { - "@id": "schema:CheckoutPage", - "@type": "rdfs:Class", - "rdfs:comment": "Web page type: Checkout page.", - "rdfs:label": "CheckoutPage", - "rdfs:subClassOf": "schema:WebPage" - }, - { - "@id": "schema:CollectionPage", - "@type": "rdfs:Class", - "rdfs:comment": "Web page type: Collection page.", - "rdfs:label": "CollectionPage", - "rdfs:subClassOf": "schema:WebPage", - "children": [ - { - "@id": "schema:ImageGallery", - "@type": "rdfs:Class", - "rdfs:comment": "Web page type: Image gallery page.", - "rdfs:label": "ImageGallery", - "rdfs:subClassOf": "schema:CollectionPage" - }, - { - "@id": "schema:VideoGallery", - "@type": "rdfs:Class", - "rdfs:comment": "Web page type: Video gallery page.", - "rdfs:label": "VideoGallery", - "rdfs:subClassOf": "schema:CollectionPage" - } - ] - }, - { - "@id": "schema:ContactPage", - "@type": "rdfs:Class", - "rdfs:comment": "Web page type: Contact page.", - "rdfs:label": "ContactPage", - "rdfs:subClassOf": "schema:WebPage" - }, - { - "@id": "schema:ItemPage", - "@type": "rdfs:Class", - "rdfs:comment": "A page devoted to a single item, such as a particular product or hotel.", - "rdfs:label": "ItemPage", - "rdfs:subClassOf": "schema:WebPage" - }, - { - "@id": "schema:MedicalWebPage", - "@type": "rdfs:Class", - "rdfs:comment": "A web page that provides medical information.", - "rdfs:label": "MedicalWebPage", - "rdfs:subClassOf": "schema:WebPage", - "properties": [ - { - "@id": "schema:aspect", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalWebPage", - "rangeIncludes": "schema:Text", - 
"rdfs:comment": "An aspect of medical practice that is considered on the page, such as 'diagnosis', 'treatment', 'causes', 'prognosis', 'etiology', 'epidemiology', etc.", - "rdfs:label": "aspect" - } - ] - }, - { - "@id": "schema:ProfilePage", - "@type": "rdfs:Class", - "rdfs:comment": "Web page type: Profile page.", - "rdfs:label": "ProfilePage", - "rdfs:subClassOf": "schema:WebPage" - }, - { - "@id": "schema:QAPage", - "@type": "rdfs:Class", - "rdfs:comment": "A QAPage is a WebPage focussed on a specific Question and its Answer(s), e.g. in a question answering site or documenting Frequently Asked Questions (FAQs).", - "rdfs:label": "QAPage", - "rdfs:subClassOf": "schema:WebPage" - }, - { - "@id": "schema:SearchResultsPage", - "@type": "rdfs:Class", - "rdfs:comment": "Web page type: Search results page.", - "rdfs:label": "SearchResultsPage", - "rdfs:subClassOf": "schema:WebPage" - } - ], - "properties": [ - { - "@id": "schema:breadcrumb", - "@type": "rdf:Property", - "domainIncludes": "schema:WebPage", - "rangeIncludes": "schema:Text", - "rdfs:comment": "A set of links that can help a user understand and navigate a website hierarchy.", - "rdfs:label": "breadcrumb" - }, - { - "@id": "schema:isPartOf", - "@type": "rdf:Property", - "domainIncludes": "schema:WebPage", - "rangeIncludes": "schema:CollectionPage", - "rdfs:comment": "Indicates the collection or gallery to which the item belongs.", - "rdfs:label": "isPartOf" - }, - { - "@id": "schema:lastReviewed", - "@type": "rdf:Property", - "domainIncludes": "schema:WebPage", - "rangeIncludes": "schema:Date", - "rdfs:comment": "Date on which the content on this web page was last reviewed for accuracy and/or completeness.", - "rdfs:label": "lastReviewed" - }, - { - "@id": "schema:mainContentOfPage", - "@type": "rdf:Property", - "domainIncludes": "schema:WebPage", - "rangeIncludes": "schema:WebPageElement", - "rdfs:comment": "Indicates if this web page element is the main subject of the page.", - "rdfs:label": 
"mainContentOfPage" - }, - { - "@id": "schema:primaryImageOfPage", - "@type": "rdf:Property", - "domainIncludes": "schema:WebPage", - "rangeIncludes": "schema:ImageObject", - "rdfs:comment": "Indicates the main image on the page.", - "rdfs:label": "primaryImageOfPage" - }, - { - "@id": "schema:relatedLink", - "@type": "rdf:Property", - "domainIncludes": "schema:WebPage", - "rangeIncludes": "schema:URL", - "rdfs:comment": "A link related to this web page, for example to other related web pages.", - "rdfs:label": "relatedLink" - }, - { - "@id": "schema:reviewedBy", - "@type": "rdf:Property", - "domainIncludes": "schema:WebPage", - "rangeIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "People or organizations that have reviewed the content on this web page for accuracy and/or completeness.", - "rdfs:label": "reviewedBy" - }, - { - "@id": "schema:significantLink", - "@type": "rdf:Property", - "domainIncludes": "schema:WebPage", - "rangeIncludes": "schema:URL", - "rdfs:comment": "One of the more significant URLs on the page. Typically, these are the non-navigation links that are clicked on the most.", - "rdfs:label": "significantLink" - }, - { - "@id": "schema:significantLinks", - "@type": "rdf:Property", - "domainIncludes": "schema:WebPage", - "rangeIncludes": "schema:URL", - "rdfs:comment": "The most significant URLs on the page. 
Typically, these are the non-navigation links that are clicked on the most (legacy spelling; see singular form, significantLink).", - "rdfs:label": "significantLinks", - "supercededBy": "schema:significantLink" - }, - { - "@id": "schema:specialty", - "@type": "rdf:Property", - "domainIncludes": "schema:WebPage", - "rangeIncludes": "schema:Specialty", - "rdfs:comment": "One of the domain specialities to which this web page's content applies.", - "rdfs:label": "specialty" - } - ] - }, - { - "@id": "schema:WebPageElement", - "@type": "rdfs:Class", - "rdfs:comment": "A web page element, like a table or an image", - "rdfs:label": "WebPageElement", - "rdfs:subClassOf": "schema:CreativeWork", - "children": [ - { - "@id": "schema:SiteNavigationElement", - "@type": "rdfs:Class", - "rdfs:comment": "A navigation element of the page.", - "rdfs:label": "SiteNavigationElement", - "rdfs:subClassOf": "schema:WebPageElement" - }, - { - "@id": "schema:Table", - "@type": "rdfs:Class", - "rdfs:comment": "A table on the page.", - "rdfs:label": "Table", - "rdfs:subClassOf": "schema:WebPageElement" - }, - { - "@id": "schema:WPAdBlock", - "@type": "rdfs:Class", - "rdfs:comment": "An advertising section of the page.", - "rdfs:label": "WPAdBlock", - "rdfs:subClassOf": "schema:WebPageElement" - }, - { - "@id": "schema:WPFooter", - "@type": "rdfs:Class", - "rdfs:comment": "The footer section of the page.", - "rdfs:label": "WPFooter", - "rdfs:subClassOf": "schema:WebPageElement" - }, - { - "@id": "schema:WPHeader", - "@type": "rdfs:Class", - "rdfs:comment": "The header section of the page.", - "rdfs:label": "WPHeader", - "rdfs:subClassOf": "schema:WebPageElement" - }, - { - "@id": "schema:WPSideBar", - "@type": "rdfs:Class", - "rdfs:comment": "A sidebar section of the page.", - "rdfs:label": "WPSideBar", - "rdfs:subClassOf": "schema:WebPageElement" - } - ] - } - ], - "properties": [ - { - "@id": "schema:about", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - 
"schema:CommunicateAction" - ], - "rangeIncludes": "schema:Thing", - "rdfs:comment": "The subject matter of the content.", - "rdfs:label": "about" - }, - { - "@id": "schema:accessibilityAPI", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Indicates that the resource is compatible with the referenced accessibility API (WebSchemas wiki lists possible values).\n ", - "rdfs:label": "accessibilityAPI" - }, - { - "@id": "schema:accessibilityControl", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Identifies input methods that are sufficient to fully control the described resource (WebSchemas wiki lists possible values).", - "rdfs:label": "accessibilityControl" - }, - { - "@id": "schema:accessibilityFeature", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Content features of the resource, such as accessible media, alternatives and supported enhancements for accessibility (WebSchemas wiki lists possible values).", - "rdfs:label": "accessibilityFeature" - }, - { - "@id": "schema:accessibilityHazard", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:Text", - "rdfs:comment": "A characteristic of the described resource that is physiologically dangerous to some users. Related to WCAG 2.0 guideline 2.3. 
(WebSchemas wiki lists possible values)", - "rdfs:label": "accessibilityHazard" - }, - { - "@id": "schema:accountablePerson", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:Person", - "rdfs:comment": "Specifies the Person that is legally accountable for the CreativeWork.", - "rdfs:label": "accountablePerson" - }, - { - "@id": "schema:aggregateRating", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Organization", - "schema:Place", - "schema:Offer", - "schema:Product" - ], - "rangeIncludes": "schema:AggregateRating", - "rdfs:comment": "The overall rating, based on a collection of reviews or ratings, of the item.", - "rdfs:label": "aggregateRating" - }, - { - "@id": "schema:alternativeHeadline", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:Text", - "rdfs:comment": "A secondary title of the CreativeWork.", - "rdfs:label": "alternativeHeadline" - }, - { - "@id": "schema:associatedMedia", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:MediaObject", - "rdfs:comment": "The media objects that encode this creative work. This property is a synonym for encodings.", - "rdfs:label": "associatedMedia" - }, - { - "@id": "schema:audience", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Product", - "schema:PlayAction" - ], - "rangeIncludes": "schema:Audience", - "rdfs:comment": "The intended audience of the item, i.e. 
the group for whom the item was created.", - "rdfs:label": "audience" - }, - { - "@id": "schema:audio", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:AudioObject", - "rdfs:comment": "An embedded audio object.", - "rdfs:label": "audio" - }, - { - "@id": "schema:author", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "The author of this content. Please note that author is special in that HTML 5 provides a special mechanism for indicating authorship via the rel tag. That is equivalent to this and may be used interchangeably.", - "rdfs:label": "author" - }, - { - "@id": "schema:award", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "An award won by this person or for this creative work.", - "rdfs:label": "award" - }, - { - "@id": "schema:awards", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "Awards won by this person or for this creative work. 
(legacy spelling; see singular form, award)", - "rdfs:label": "awards", - "supercededBy": "schema:award" - }, - { - "@id": "schema:citation", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": [ - "schema:CreativeWork", - "schema:Text" - ], - "rdfs:comment": "A citation or reference to another creative work, such as another publication, web page, scholarly article, etc.", - "rdfs:label": "citation" - }, - { - "@id": "schema:comment", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": [ - "schema:UserComments", - "schema:Comment" - ], - "rdfs:comment": "Comments, typically from users, on this CreativeWork.", - "rdfs:label": "comment" - }, - { - "@id": "schema:commentCount", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:Integer", - "rdfs:comment": "The number of comments this CreativeWork (e.g. Article, Question or Answer) has received. This is most applicable to works published in Web sites with commenting system; additional comments may exist elsewhere.", - "rdfs:label": "commentCount" - }, - { - "@id": "schema:contentLocation", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:Place", - "rdfs:comment": "The location of the content.", - "rdfs:label": "contentLocation" - }, - { - "@id": "schema:contentRating", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Official rating of a piece of content—for example,'MPAA PG-13'.", - "rdfs:label": "contentRating" - }, - { - "@id": "schema:contributor", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "A secondary contributor to the CreativeWork.", - "rdfs:label": "contributor" - }, - { - "@id": "schema:copyrightHolder", - "@type": "rdf:Property", - "domainIncludes": 
"schema:CreativeWork", - "rangeIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "The party holding the legal copyright to the CreativeWork.", - "rdfs:label": "copyrightHolder" - }, - { - "@id": "schema:copyrightYear", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:Number", - "rdfs:comment": "The year during which the claimed copyright for the CreativeWork was first asserted.", - "rdfs:label": "copyrightYear" - }, - { - "@id": "schema:creator", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:UserComments" - ], - "rangeIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "The creator/author of this CreativeWork or UserComments. This is the same as the Author property for CreativeWork.", - "rdfs:label": "creator" - }, - { - "@id": "schema:dateCreated", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:Date", - "rdfs:comment": "The date on which the CreativeWork was created.", - "rdfs:label": "dateCreated" - }, - { - "@id": "schema:dateModified", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:Date", - "rdfs:comment": "The date on which the CreativeWork was most recently modified.", - "rdfs:label": "dateModified" - }, - { - "@id": "schema:datePublished", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:Date", - "rdfs:comment": "Date of first broadcast/publication.", - "rdfs:label": "datePublished" - }, - { - "@id": "schema:discussionUrl", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:URL", - "rdfs:comment": "A link to the page containing the comments of the CreativeWork.", - "rdfs:label": "discussionUrl" - }, - { - "@id": "schema:editor", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": 
"schema:Person", - "rdfs:comment": "Specifies the Person who edited the CreativeWork.", - "rdfs:label": "editor" - }, - { - "@id": "schema:educationalAlignment", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:AlignmentObject", - "rdfs:comment": "An alignment to an established educational framework.", - "rdfs:label": "educationalAlignment" - }, - { - "@id": "schema:educationalUse", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The purpose of a work in the context of education; for example, 'assignment', 'group work'.", - "rdfs:label": "educationalUse" - }, - { - "@id": "schema:encoding", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:MediaObject", - "rdfs:comment": "A media object that encode this CreativeWork.", - "rdfs:label": "encoding" - }, - { - "@id": "schema:encodings", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:MediaObject", - "rdfs:comment": "The media objects that encode this creative work (legacy spelling; see singular form, encoding).", - "rdfs:label": "encodings", - "supercededBy": "schema:encoding" - }, - { - "@id": "schema:genre", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Genre of the creative work", - "rdfs:label": "genre" - }, - { - "@id": "schema:headline", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Headline of the article", - "rdfs:label": "headline" - }, - { - "@id": "schema:inLanguage", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The language of the content. 
please use one of the language codes from the IETF BCP 47 standard.", - "rdfs:label": "inLanguage" - }, - { - "@id": "schema:interactionCount", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Organization", - "schema:Place", - "schema:MediaObject", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "A count of a specific user interactions with this item—for example, 20 UserLikes, 5 UserComments, or 300 UserDownloads. The user interaction type should be one of the sub types of UserInteraction.", - "rdfs:label": "interactionCount" - }, - { - "@id": "schema:interactivityType", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The predominant mode of learning supported by the learning resource. Acceptable values are 'active', 'expositive', or 'mixed'.", - "rdfs:label": "interactivityType" - }, - { - "@id": "schema:isBasedOnUrl", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:URL", - "rdfs:comment": "A resource that was used in the creation of this resource. This term can be repeated for multiple sources. For example, http://example.com/great-multiplication-intro.html", - "rdfs:label": "isBasedOnUrl" - }, - { - "@id": "schema:isFamilyFriendly", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:Boolean", - "rdfs:comment": "Indicates whether this content is family friendly.", - "rdfs:label": "isFamilyFriendly" - }, - { - "@id": "schema:keywords", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Keywords or tags used to describe this content. 
Multiple entries in a keywords list are typically delimited by commas.", - "rdfs:label": "keywords" - }, - { - "@id": "schema:learningResourceType", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The predominant type or kind characterizing the learning resource. For example, 'presentation', 'handout'.", - "rdfs:label": "learningResourceType" - }, - { - "@id": "schema:license", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": [ - "schema:CreativeWork", - "schema:URL" - ], - "rdfs:comment": "A license document that applies to this content, typically indicated by URL.", - "rdfs:label": "license" - }, - { - "@id": "schema:mentions", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:Thing", - "rdfs:comment": "Indicates that the CreativeWork contains a reference to, but is not necessarily about a concept.", - "rdfs:label": "mentions" - }, - { - "@id": "schema:offers", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:MediaObject", - "schema:Event", - "schema:Product" - ], - "rangeIncludes": "schema:Offer", - "rdfs:comment": "An offer to provide this item—for example, an offer to sell a product, rent the DVD of a movie, or give away tickets to an event.", - "rdfs:label": "offers" - }, - { - "@id": "schema:provider", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Service", - "schema:Reservation", - "schema:Flight", - "schema:TrainTrip", - "schema:BusTrip" - ], - "rangeIncludes": [ - "schema:Person", - "schema:Organization" - ], - "rdfs:comment": "The person or organization providing the service, reservation, or creative work. 
The provider may subcontract out the service.", - "rdfs:label": "provider" - }, - { - "@id": "schema:publisher", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:Organization", - "rdfs:comment": "The publisher of the creative work.", - "rdfs:label": "publisher" - }, - { - "@id": "schema:publishingPrinciples", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:URL", - "rdfs:comment": "Link to page describing the editorial principles of the organization primarily responsible for the creation of the CreativeWork.", - "rdfs:label": "publishingPrinciples" - }, - { - "@id": "schema:review", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Organization", - "schema:Place", - "schema:Offer", - "schema:Product" - ], - "rangeIncludes": "schema:Review", - "rdfs:comment": "A review of the item.", - "rdfs:label": "review" - }, - { - "@id": "schema:reviews", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Organization", - "schema:Place", - "schema:Offer", - "schema:Product" - ], - "rangeIncludes": "schema:Review", - "rdfs:comment": "Review of the item (legacy spelling; see singular form, review).", - "rdfs:label": "reviews", - "supercededBy": "schema:review" - }, - { - "@id": "schema:sourceOrganization", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:Organization", - "rdfs:comment": "The Organization on whose behalf the creator was working.", - "rdfs:label": "sourceOrganization" - }, - { - "@id": "schema:text", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The textual content of this CreativeWork.", - "rdfs:label": "text" - }, - { - "@id": "schema:thumbnailUrl", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:URL", - "rdfs:comment": "A thumbnail 
image relevant to the Thing.", - "rdfs:label": "thumbnailUrl" - }, - { - "@id": "schema:timeRequired", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:Duration", - "rdfs:comment": "Approximate or typical time it takes to work with or through this learning resource for the typical intended target audience, e.g. 'P30M', 'P1H25M'.", - "rdfs:label": "timeRequired" - }, - { - "@id": "schema:typicalAgeRange", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Event" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The typical expected age range, e.g. '7-9', '11-'.", - "rdfs:label": "typicalAgeRange" - }, - { - "@id": "schema:version", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:Number", - "rdfs:comment": "The version of the CreativeWork embodied by a specified resource.", - "rdfs:label": "version" - }, - { - "@id": "schema:video", - "@type": "rdf:Property", - "domainIncludes": "schema:CreativeWork", - "rangeIncludes": "schema:VideoObject", - "rdfs:comment": "An embedded video object.", - "rdfs:label": "video" - } - ] - }, - { - "@id": "schema:Event", - "@type": "rdfs:Class", - "owl:equivalentClass": "dc:Event", - "rdfs:comment": "An event happening at a certain time and location, such as a concert, lecture, or festival. Ticketing information may be added via the 'offers' property. 
Repeated events may be structured as separate Event objects.", - "rdfs:label": "Event", - "rdfs:subClassOf": "schema:Thing", - "children": [ - { - "@id": "schema:BusinessEvent", - "@type": "rdfs:Class", - "rdfs:comment": "Event type: Business event.", - "rdfs:label": "BusinessEvent", - "rdfs:subClassOf": "schema:Event" - }, - { - "@id": "schema:ChildrensEvent", - "@type": "rdfs:Class", - "rdfs:comment": "Event type: Children's event.", - "rdfs:label": "ChildrensEvent", - "rdfs:subClassOf": "schema:Event" - }, - { - "@id": "schema:ComedyEvent", - "@type": "rdfs:Class", - "rdfs:comment": "Event type: Comedy event.", - "rdfs:label": "ComedyEvent", - "rdfs:subClassOf": "schema:Event" - }, - { - "@id": "schema:DanceEvent", - "@type": "rdfs:Class", - "rdfs:comment": "Event type: A social dance.", - "rdfs:label": "DanceEvent", - "rdfs:subClassOf": "schema:Event" - }, - { - "@id": "schema:DeliveryEvent", - "@type": "rdfs:Class", - "rdfs:comment": "An event involving the delivery of an item.", - "rdfs:label": "DeliveryEvent", - "rdfs:subClassOf": "schema:Event", - "properties": [ - { - "@id": "schema:accessCode", - "@type": "rdf:Property", - "domainIncludes": "schema:DeliveryEvent", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Password, PIN, or access code needed for delivery (e.g. 
from a locker).", - "rdfs:label": "accessCode" - }, - { - "@id": "schema:availableFrom", - "@type": "rdf:Property", - "domainIncludes": "schema:DeliveryEvent", - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "When the item is available for pickup from the store, locker, etc.", - "rdfs:label": "availableFrom" - }, - { - "@id": "schema:availableThrough", - "@type": "rdf:Property", - "domainIncludes": "schema:DeliveryEvent", - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "After this date, the item will no longer be available for pickup.", - "rdfs:label": "availableThrough" - }, - { - "@id": "schema:hasDeliveryMethod", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:DeliveryEvent", - "schema:ParcelDelivery" - ], - "rangeIncludes": "schema:DeliveryMethod", - "rdfs:comment": "Method used for delivery or shipping.", - "rdfs:label": "hasDeliveryMethod" - } - ] - }, - { - "@id": "schema:EducationEvent", - "@type": "rdfs:Class", - "rdfs:comment": "Event type: Education event.", - "rdfs:label": "EducationEvent", - "rdfs:subClassOf": "schema:Event" - }, - { - "@id": "schema:Festival", - "@type": "rdfs:Class", - "rdfs:comment": "Event type: Festival.", - "rdfs:label": "Festival", - "rdfs:subClassOf": "schema:Event" - }, - { - "@id": "schema:FoodEvent", - "@type": "rdfs:Class", - "rdfs:comment": "Event type: Food event.", - "rdfs:label": "FoodEvent", - "rdfs:subClassOf": "schema:Event" - }, - { - "@id": "schema:LiteraryEvent", - "@type": "rdfs:Class", - "rdfs:comment": "Event type: Literary event.", - "rdfs:label": "LiteraryEvent", - "rdfs:subClassOf": "schema:Event" - }, - { - "@id": "schema:MusicEvent", - "@type": "rdfs:Class", - "rdfs:comment": "Event type: Music event.", - "rdfs:label": "MusicEvent", - "rdfs:subClassOf": "schema:Event" - }, - { - "@id": "schema:PublicationEvent", - "@type": "rdfs:Class", - "rdfs:comment": "A PublicationEvent corresponds indifferently to the event of publication for a CreativeWork of any type e.g. 
a broadcast event, an on-demand event, a book/journal publication via a variety of delivery media.", - "rdfs:label": "PublicationEvent", - "rdfs:subClassOf": "schema:Event", - "children": [ - { - "@id": "schema:BroadcastEvent", - "@type": "rdfs:Class", - "rdfs:comment": "An over the air or online broadcast event.", - "rdfs:label": "BroadcastEvent", - "rdfs:subClassOf": "schema:PublicationEvent" - }, - { - "@id": "schema:OnDemandEvent", - "@type": "rdfs:Class", - "rdfs:comment": "A publication event e.g. catch-up TV or radio podcast, during which a program is available on-demand.", - "rdfs:label": "OnDemandEvent", - "rdfs:subClassOf": "schema:PublicationEvent" - } - ], - "properties": [ - { - "@id": "schema:free", - "@type": "rdf:Property", - "domainIncludes": "schema:PublicationEvent", - "rangeIncludes": "schema:Boolean", - "rdfs:comment": "A flag to signal that the publication is accessible for free.", - "rdfs:label": "free" - }, - { - "@id": "schema:publishedOn", - "@type": "rdf:Property", - "domainIncludes": "schema:PublicationEvent", - "rangeIncludes": "schema:BroadcastService", - "rdfs:comment": "A broadcast service associated with the publication event.", - "rdfs:label": "publishedOn" - } - ] - }, - { - "@id": "schema:SaleEvent", - "@type": "rdfs:Class", - "rdfs:comment": "Event type: Sales event.", - "rdfs:label": "SaleEvent", - "rdfs:subClassOf": "schema:Event" - }, - { - "@id": "schema:SocialEvent", - "@type": "rdfs:Class", - "rdfs:comment": "Event type: Social event.", - "rdfs:label": "SocialEvent", - "rdfs:subClassOf": "schema:Event" - }, - { - "@id": "schema:SportsEvent", - "@type": "rdfs:Class", - "rdfs:comment": "Event type: Sports event.", - "rdfs:label": "SportsEvent", - "rdfs:subClassOf": "schema:Event" - }, - { - "@id": "schema:TheaterEvent", - "@type": "rdfs:Class", - "rdfs:comment": "Event type: Theater performance.", - "rdfs:label": "TheaterEvent", - "rdfs:subClassOf": "schema:Event" - }, - { - "@id": "schema:UserInteraction", - "@type": 
"rdfs:Class", - "rdfs:comment": "A user interacting with a page", - "rdfs:label": "UserInteraction", - "rdfs:subClassOf": "schema:Event", - "children": [ - { - "@id": "schema:UserBlocks", - "@type": "rdfs:Class", - "rdfs:comment": "User interaction: Block this content.", - "rdfs:label": "UserBlocks", - "rdfs:subClassOf": "schema:UserInteraction" - }, - { - "@id": "schema:UserCheckins", - "@type": "rdfs:Class", - "rdfs:comment": "User interaction: Check-in at a place.", - "rdfs:label": "UserCheckins", - "rdfs:subClassOf": "schema:UserInteraction" - }, - { - "@id": "schema:UserComments", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_rNews" - }, - "rdfs:comment": "The UserInteraction event in which a user comments on an item.", - "rdfs:label": "UserComments", - "rdfs:subClassOf": "schema:UserInteraction", - "properties": [ - { - "@id": "schema:commentText", - "@type": "rdf:Property", - "domainIncludes": "schema:UserComments", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The text of the UserComment.", - "rdfs:label": "commentText" - }, - { - "@id": "schema:commentTime", - "@type": "rdf:Property", - "domainIncludes": "schema:UserComments", - "rangeIncludes": "schema:Date", - "rdfs:comment": "The time at which the UserComment was made.", - "rdfs:label": "commentTime" - }, - { - "@id": "schema:creator", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:UserComments" - ], - "rangeIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "The creator/author of this CreativeWork or UserComments. 
This is the same as the Author property for CreativeWork.", - "rdfs:label": "creator" - }, - { - "@id": "schema:discusses", - "@type": "rdf:Property", - "domainIncludes": "schema:UserComments", - "rangeIncludes": "schema:CreativeWork", - "rdfs:comment": "Specifies the CreativeWork associated with the UserComment.", - "rdfs:label": "discusses" - }, - { - "@id": "schema:replyToUrl", - "@type": "rdf:Property", - "domainIncludes": "schema:UserComments", - "rangeIncludes": "schema:URL", - "rdfs:comment": "The URL at which a reply may be posted to the specified UserComment.", - "rdfs:label": "replyToUrl" - } - ] - }, - { - "@id": "schema:UserDownloads", - "@type": "rdfs:Class", - "rdfs:comment": "User interaction: Download of an item.", - "rdfs:label": "UserDownloads", - "rdfs:subClassOf": "schema:UserInteraction" - }, - { - "@id": "schema:UserLikes", - "@type": "rdfs:Class", - "rdfs:comment": "User interaction: Like an item.", - "rdfs:label": "UserLikes", - "rdfs:subClassOf": "schema:UserInteraction" - }, - { - "@id": "schema:UserPageVisits", - "@type": "rdfs:Class", - "rdfs:comment": "User interaction: Visit to a web page.", - "rdfs:label": "UserPageVisits", - "rdfs:subClassOf": "schema:UserInteraction" - }, - { - "@id": "schema:UserPlays", - "@type": "rdfs:Class", - "rdfs:comment": "User interaction: Play count of an item, for example a video or a song.", - "rdfs:label": "UserPlays", - "rdfs:subClassOf": "schema:UserInteraction" - }, - { - "@id": "schema:UserPlusOnes", - "@type": "rdfs:Class", - "rdfs:comment": "User interaction: +1.", - "rdfs:label": "UserPlusOnes", - "rdfs:subClassOf": "schema:UserInteraction" - }, - { - "@id": "schema:UserTweets", - "@type": "rdfs:Class", - "rdfs:comment": "User interaction: Tweets.", - "rdfs:label": "UserTweets", - "rdfs:subClassOf": "schema:UserInteraction" - } - ] - }, - { - "@id": "schema:VisualArtsEvent", - "@type": "rdfs:Class", - "rdfs:comment": "Event type: Visual arts event.", - "rdfs:label": "VisualArtsEvent", - 
"rdfs:subClassOf": "schema:Event" - } - ], - "properties": [ - { - "@id": "schema:attendee", - "@type": "rdf:Property", - "domainIncludes": "schema:Event", - "rangeIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "A person or organization attending the event.", - "rdfs:label": "attendee" - }, - { - "@id": "schema:attendees", - "@type": "rdf:Property", - "domainIncludes": "schema:Event", - "rangeIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "A person attending the event (legacy spelling; see singular form, attendee).", - "rdfs:label": "attendees", - "supercededBy": "schema:attendees" - }, - { - "@id": "schema:doorTime", - "@type": "rdf:Property", - "domainIncludes": "schema:Event", - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The time admission will commence.", - "rdfs:label": "doorTime" - }, - { - "@id": "schema:duration", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MediaObject", - "schema:Event", - "schema:Movie", - "schema:MusicRecording" - ], - "rangeIncludes": "schema:Duration", - "rdfs:comment": "The duration of the item (movie, audio recording, event, etc.) 
in ISO 8601 date format.", - "rdfs:label": "duration" - }, - { - "@id": "schema:endDate", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Role", - "schema:Event", - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries" - ], - "rangeIncludes": "schema:Date", - "rdfs:comment": "The end date and time of the role, event or item (in ISO 8601 date format).", - "rdfs:label": "endDate" - }, - { - "@id": "schema:eventStatus", - "@type": "rdf:Property", - "domainIncludes": "schema:Event", - "rangeIncludes": "schema:EventStatusType", - "rdfs:comment": "An eventStatus of an event represents its status; particularly useful when an event is cancelled or rescheduled.", - "rdfs:label": "eventStatus" - }, - { - "@id": "schema:location", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Event", - "schema:Action" - ], - "rangeIncludes": [ - "schema:Place", - "schema:PostalAddress" - ], - "rdfs:comment": "The location of the event, organization or action.", - "rdfs:label": "location" - }, - { - "@id": "schema:offers", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:MediaObject", - "schema:Event", - "schema:Product" - ], - "rangeIncludes": "schema:Offer", - "rdfs:comment": "An offer to provide this item—for example, an offer to sell a product, rent the DVD of a movie, or give away tickets to an event.", - "rdfs:label": "offers" - }, - { - "@id": "schema:performer", - "@type": "rdf:Property", - "domainIncludes": "schema:Event", - "rangeIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "A performer at the event—for example, a presenter, musician, musical group or actor.", - "rdfs:label": "performer" - }, - { - "@id": "schema:performers", - "@type": "rdf:Property", - "domainIncludes": "schema:Event", - "rangeIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "The main performer or performers of the event—for example, a presenter, 
musician, or actor (legacy spelling; see singular form, performer).", - "rdfs:label": "performers", - "supercededBy": "schema:performer" - }, - { - "@id": "schema:previousStartDate", - "@type": "rdf:Property", - "domainIncludes": "schema:Event", - "rangeIncludes": "schema:Date", - "rdfs:comment": "Used in conjunction with eventStatus for rescheduled or cancelled events. This property contains the previously scheduled start date. For rescheduled events, the startDate property should be used for the newly scheduled start date. In the (rare) case of an event that has been postponed and rescheduled multiple times, this field may be repeated.", - "rdfs:label": "previousStartDate" - }, - { - "@id": "schema:startDate", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Role", - "schema:Event", - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries" - ], - "rangeIncludes": "schema:Date", - "rdfs:comment": "The start date and time of the event, role or item (in ISO 8601 date format).", - "rdfs:label": "startDate" - }, - { - "@id": "schema:subEvent", - "@type": "rdf:Property", - "domainIncludes": "schema:Event", - "rangeIncludes": "schema:Event", - "rdfs:comment": "An Event that is part of this event. For example, a conference event includes many presentations, each of which is a subEvent of the conference.", - "rdfs:label": "subEvent" - }, - { - "@id": "schema:subEvents", - "@type": "rdf:Property", - "domainIncludes": "schema:Event", - "rangeIncludes": "schema:Event", - "rdfs:comment": "Events that are a part of this event. For example, a conference event includes many presentations, each subEvents of the conference (legacy spelling; see singular form, subEvent).", - "rdfs:label": "subEvents", - "supercededBy": "schema:subEvent" - }, - { - "@id": "schema:superEvent", - "@type": "rdf:Property", - "domainIncludes": "schema:Event", - "rangeIncludes": "schema:Event", - "rdfs:comment": "An event that this event is a part of. 
For example, a collection of individual music performances might each have a music festival as their superEvent.", - "rdfs:label": "superEvent" - }, - { - "@id": "schema:typicalAgeRange", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Event" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The typical expected age range, e.g. '7-9', '11-'.", - "rdfs:label": "typicalAgeRange" - }, - { - "@id": "schema:workPerformed", - "@type": "rdf:Property", - "domainIncludes": "schema:Event", - "rangeIncludes": "schema:CreativeWork", - "rdfs:comment": "A work performed in some event, for example a play performed in a TheaterEvent.", - "rdfs:label": "workPerformed" - } - ] - }, - { - "@id": "schema:Intangible", - "@type": "rdfs:Class", - "rdfs:comment": "A utility class that serves as the umbrella for a number of 'intangible' things such as quantities, structured values, etc.", - "rdfs:label": "Intangible", - "rdfs:subClassOf": "schema:Thing", - "children": [ - { - "@id": "schema:AlignmentObject", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_LRMIClass" - }, - "rdfs:comment": "An intangible item that describes an alignment between a learning resource and a node in an educational framework.", - "rdfs:label": "AlignmentObject", - "rdfs:subClassOf": "schema:Intangible", - "properties": [ - { - "@id": "schema:alignmentType", - "@type": "rdf:Property", - "domainIncludes": "schema:AlignmentObject", - "rangeIncludes": "schema:Text", - "rdfs:comment": "A category of alignment between the learning resource and the framework node. 
Recommended values include: 'assesses', 'teaches', 'requires', 'textComplexity', 'readingLevel', 'educationalSubject', and 'educationLevel'.", - "rdfs:label": "alignmentType" - }, - { - "@id": "schema:educationalFramework", - "@type": "rdf:Property", - "domainIncludes": "schema:AlignmentObject", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The framework to which the resource being described is aligned.", - "rdfs:label": "educationalFramework" - }, - { - "@id": "schema:targetDescription", - "@type": "rdf:Property", - "domainIncludes": "schema:AlignmentObject", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The description of a node in an established educational framework.", - "rdfs:label": "targetDescription" - }, - { - "@id": "schema:targetName", - "@type": "rdf:Property", - "domainIncludes": "schema:AlignmentObject", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The name of a node in an established educational framework.", - "rdfs:label": "targetName" - }, - { - "@id": "schema:targetUrl", - "@type": "rdf:Property", - "domainIncludes": "schema:AlignmentObject", - "rangeIncludes": "schema:URL", - "rdfs:comment": "The URL of a node in an established educational framework.", - "rdfs:label": "targetUrl" - } - ] - }, - { - "@id": "schema:Audience", - "@type": "rdfs:Class", - "rdfs:comment": "Intended audience for an item, i.e. the group for whom the item was created.", - "rdfs:label": "Audience", - "rdfs:subClassOf": "schema:Intangible", - "children": [ - { - "@id": "schema:BusinessAudience", - "@type": "rdfs:Class", - "rdfs:comment": "A set of characteristics belonging to businesses, e.g. 
who compose an item's target audience.", - "rdfs:label": "BusinessAudience", - "rdfs:subClassOf": "schema:Audience", - "properties": [ - { - "@id": "schema:numberofEmployees", - "@type": "rdf:Property", - "domainIncludes": "schema:BusinessAudience", - "rangeIncludes": "schema:QuantitativeValue", - "rdfs:comment": "The size of business by number of employees.", - "rdfs:label": "numberofEmployees" - }, - { - "@id": "schema:yearlyRevenue", - "@type": "rdf:Property", - "domainIncludes": "schema:BusinessAudience", - "rangeIncludes": "schema:QuantitativeValue", - "rdfs:comment": "The size of the business in annual revenue.", - "rdfs:label": "yearlyRevenue" - }, - { - "@id": "schema:yearsInOperation", - "@type": "rdf:Property", - "domainIncludes": "schema:BusinessAudience", - "rangeIncludes": "schema:QuantitativeValue", - "rdfs:comment": "The age of the business.", - "rdfs:label": "yearsInOperation" - } - ] - }, - { - "@id": "schema:EducationalAudience", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_LRMIClass" - }, - "rdfs:comment": "An EducationalAudience", - "rdfs:label": "EducationalAudience", - "rdfs:subClassOf": "schema:Audience", - "properties": [ - { - "@id": "schema:educationalRole", - "@type": "rdf:Property", - "domainIncludes": "schema:EducationalAudience", - "rangeIncludes": "schema:Text", - "rdfs:comment": "An educationalRole of an EducationalAudience", - "rdfs:label": "educationalRole" - } - ] - }, - { - "@id": "schema:MedicalAudience", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Target audiences for medical web pages. 
Enumerated type.", - "rdfs:label": "MedicalAudience", - "rdfs:subClassOf": [ - "schema:Audience", - "schema:MedicalEnumeration", - "schema:Enumeration", - "schema:PeopleAudience" - ] - }, - { - "@id": "schema:PeopleAudience", - "@type": "rdfs:Class", - "rdfs:comment": "A set of characteristics belonging to people, e.g. who compose an item's target audience.", - "rdfs:label": "PeopleAudience", - "rdfs:subClassOf": "schema:Audience", - "children": [ - { - "@id": "schema:MedicalAudience", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Target audiences for medical web pages. Enumerated type.", - "rdfs:label": "MedicalAudience", - "rdfs:subClassOf": [ - "schema:Audience", - "schema:MedicalEnumeration", - "schema:Enumeration", - "schema:PeopleAudience" - ] - }, - { - "@id": "schema:ParentAudience", - "@type": "rdfs:Class", - "rdfs:comment": "A set of characteristics describing parents, who can be interested in viewing some content", - "rdfs:label": "ParentAudience", - "rdfs:subClassOf": "schema:PeopleAudience", - "properties": [ - { - "@id": "schema:childMaxAge", - "@type": "rdf:Property", - "domainIncludes": "schema:ParentAudience", - "rangeIncludes": "schema:Number", - "rdfs:comment": "Maximal age of the child", - "rdfs:label": "childMaxAge" - }, - { - "@id": "schema:childMinAge", - "@type": "rdf:Property", - "domainIncludes": "schema:ParentAudience", - "rangeIncludes": "schema:Number", - "rdfs:comment": "Minimal age of the child", - "rdfs:label": "childMinAge" - } - ] - } - ], - "properties": [ - { - "@id": "schema:healthCondition", - "@type": "rdf:Property", - "domainIncludes": "schema:PeopleAudience", - "rangeIncludes": "schema:MedicalCondition", - "rdfs:comment": "Expectations for health conditions of target audience", - "rdfs:label": "healthCondition" - }, - { - "@id": "schema:requiredGender", - "@type": "rdf:Property", - "domainIncludes": 
"schema:PeopleAudience", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Audiences defined by a person's gender.", - "rdfs:label": "requiredGender" - }, - { - "@id": "schema:requiredMaxAge", - "@type": "rdf:Property", - "domainIncludes": "schema:PeopleAudience", - "rangeIncludes": "schema:Integer", - "rdfs:comment": "Audiences defined by a person's maximum age.", - "rdfs:label": "requiredMaxAge" - }, - { - "@id": "schema:requiredMinAge", - "@type": "rdf:Property", - "domainIncludes": "schema:PeopleAudience", - "rangeIncludes": "schema:Integer", - "rdfs:comment": "Audiences defined by a person's minimum age.", - "rdfs:label": "requiredMinAge" - }, - { - "@id": "schema:suggestedGender", - "@type": "rdf:Property", - "domainIncludes": "schema:PeopleAudience", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The gender of the person or audience.", - "rdfs:label": "suggestedGender" - }, - { - "@id": "schema:suggestedMaxAge", - "@type": "rdf:Property", - "domainIncludes": "schema:PeopleAudience", - "rangeIncludes": "schema:Number", - "rdfs:comment": "Maximal age recommended for viewing content.", - "rdfs:label": "suggestedMaxAge" - }, - { - "@id": "schema:suggestedMinAge", - "@type": "rdf:Property", - "domainIncludes": "schema:PeopleAudience", - "rangeIncludes": "schema:Number", - "rdfs:comment": "Minimal age recommended for viewing content.", - "rdfs:label": "suggestedMinAge" - } - ] - } - ], - "properties": [ - { - "@id": "schema:audienceType", - "@type": "rdf:Property", - "domainIncludes": "schema:Audience", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The target group associated with a given audience (e.g. 
veterans, car owners, musicians, etc.)\n domain: Audience\n Range: Text\n ", - "rdfs:label": "audienceType" - }, - { - "@id": "schema:geographicArea", - "@type": "rdf:Property", - "domainIncludes": "schema:Audience", - "rangeIncludes": "schema:AdministrativeArea", - "rdfs:comment": "The geographic area associated with the audience.", - "rdfs:label": "geographicArea" - } - ] - }, - { - "@id": "schema:Brand", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_GoodRelationsClass" - }, - "rdfs:comment": "A brand is a name used by an organization or business person for labeling a product, product group, or similar.", - "rdfs:label": "Brand", - "rdfs:subClassOf": "schema:Intangible", - "properties": [ - { - "@id": "schema:logo", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place", - "schema:Product", - "schema:Brand" - ], - "rangeIncludes": [ - "schema:ImageObject", - "schema:URL" - ], - "rdfs:comment": "A logo associated with an organization.", - "rdfs:label": "logo" - } - ] - }, - { - "@id": "schema:BusTrip", - "@type": "rdfs:Class", - "rdfs:comment": "A trip on a commercial bus line.", - "rdfs:label": "Bus Trip", - "rdfs:subClassOf": "schema:Intangible", - "properties": [ - { - "@id": "schema:arrivalBusStop", - "@type": "rdf:Property", - "domainIncludes": "schema:BusTrip", - "rangeIncludes": [ - "schema:BusStation", - "schema:BusStop" - ], - "rdfs:comment": "The stop or station from which the bus arrives.", - "rdfs:label": "arrivalBusStop" - }, - { - "@id": "schema:arrivalTime", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:BusTrip", - "schema:Flight", - "schema:TrainTrip" - ], - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The expected arrival time.", - "rdfs:label": "arrivalTime" - }, - { - "@id": "schema:busName", - "@type": "rdf:Property", - "domainIncludes": "schema:BusTrip", - "rangeIncludes": "schema:Text", - 
"rdfs:comment": "The name of the bus (e.g. Bolt Express).", - "rdfs:label": "busName" - }, - { - "@id": "schema:busNumber", - "@type": "rdf:Property", - "domainIncludes": "schema:BusTrip", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The unique identifier for the bus.", - "rdfs:label": "busNumber" - }, - { - "@id": "schema:departureBusStop", - "@type": "rdf:Property", - "domainIncludes": "schema:BusTrip", - "rangeIncludes": [ - "schema:BusStation", - "schema:BusStop" - ], - "rdfs:comment": "The stop or station from which the bus departs.", - "rdfs:label": "departureBusStop" - }, - { - "@id": "schema:departureTime", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:BusTrip", - "schema:Flight", - "schema:TrainTrip" - ], - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The expected departure time.", - "rdfs:label": "departureTime" - }, - { - "@id": "schema:provider", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Service", - "schema:Reservation", - "schema:Flight", - "schema:TrainTrip", - "schema:BusTrip" - ], - "rangeIncludes": [ - "schema:Person", - "schema:Organization" - ], - "rdfs:comment": "The person or organization providing the service, reservation, or creative work. The provider may subcontract out the service.", - "rdfs:label": "provider" - } - ] - }, - { - "@id": "schema:Class", - "@type": "rdfs:Class", - "rdfs:comment": "A class, also often called a 'Type'; equivalent to rdfs:Class.", - "rdfs:label": "Class", - "rdfs:subClassOf": "schema:Intangible" - }, - { - "@id": "schema:Demand", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_GoodRelationsClass" - }, - "rdfs:comment": "A demand entity represents the public, not necessarily binding, not necessarily exclusive, announcement by an organization or person to seek a certain type of goods or services. 
For describing demand using this type, the very same properties used for Offer apply.", - "rdfs:label": "Demand", - "rdfs:subClassOf": "schema:Intangible", - "properties": [ - { - "@id": "schema:acceptedPaymentMethod", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": "schema:PaymentMethod", - "rdfs:comment": "The payment method(s) accepted by seller for this offer.", - "rdfs:label": "acceptedPaymentMethod" - }, - { - "@id": "schema:advanceBookingRequirement", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": "schema:QuantitativeValue", - "rdfs:comment": "The amount of time that is required between accepting the offer and the actual usage of the resource or service.", - "rdfs:label": "advanceBookingRequirement" - }, - { - "@id": "schema:availability", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": "schema:ItemAvailability", - "rdfs:comment": "The availability of this item—for example In stock, Out of stock, Pre-order, etc.", - "rdfs:label": "availability" - }, - { - "@id": "schema:availabilityEnds", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The end of the availability of the product or service included in the offer.", - "rdfs:label": "availabilityEnds" - }, - { - "@id": "schema:availabilityStarts", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The beginning of the availability of the product or service included in the offer.", - "rdfs:label": "availabilityStarts" - }, - { - "@id": "schema:availableAtOrFrom", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": "schema:Place", - "rdfs:comment": "The place(s) from which the offer can 
be obtained (e.g. store locations).", - "rdfs:label": "availableAtOrFrom" - }, - { - "@id": "schema:availableDeliveryMethod", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": "schema:DeliveryMethod", - "rdfs:comment": "The delivery method(s) available for this offer.", - "rdfs:label": "availableDeliveryMethod" - }, - { - "@id": "schema:businessFunction", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand", - "schema:TypeAndQuantityNode" - ], - "rangeIncludes": "schema:BusinessFunction", - "rdfs:comment": "The business function (e.g. sell, lease, repair, dispose) of the offer or component of a bundle (TypeAndQuantityNode). The default is http://purl.org/goodrelations/v1#Sell.", - "rdfs:label": "businessFunction" - }, - { - "@id": "schema:deliveryLeadTime", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": "schema:QuantitativeValue", - "rdfs:comment": "The typical delay between the receipt of the order and the goods leaving the warehouse.", - "rdfs:label": "deliveryLeadTime" - }, - { - "@id": "schema:eligibleCustomerType", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": "schema:BusinessEntityType", - "rdfs:comment": "The type(s) of customers for which the given offer is valid.", - "rdfs:label": "eligibleCustomerType" - }, - { - "@id": "schema:eligibleDuration", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": "schema:QuantitativeValue", - "rdfs:comment": "The duration for which the given offer is valid.", - "rdfs:label": "eligibleDuration" - }, - { - "@id": "schema:eligibleQuantity", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:PriceSpecification", - "schema:Demand" - ], - "rangeIncludes": "schema:QuantitativeValue", - "rdfs:comment": "The interval and unit of 
measurement of ordering quantities for which the offer or price specification is valid. This allows e.g. specifying that a certain freight charge is valid only for a certain quantity.", - "rdfs:label": "eligibleQuantity" - }, - { - "@id": "schema:eligibleRegion", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:DeliveryChargeSpecification", - "schema:Demand" - ], - "rangeIncludes": [ - "schema:GeoShape", - "schema:Text" - ], - "rdfs:comment": "The ISO 3166-1 (ISO 3166-1 alpha-2) or ISO 3166-2 code, or the GeoShape for the geo-political region(s) for which the offer or delivery charge specification is valid.", - "rdfs:label": "eligibleRegion" - }, - { - "@id": "schema:eligibleTransactionVolume", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:PriceSpecification", - "schema:Demand" - ], - "rangeIncludes": "schema:PriceSpecification", - "rdfs:comment": "The transaction volume, in a monetary unit, for which the offer or price specification is valid, e.g. for indicating a minimal purchasing volume, to express free shipping above a certain order volume, or to limit the acceptance of credit cards to purchases to a certain minimal amount.", - "rdfs:label": "eligibleTransactionVolume" - }, - { - "@id": "schema:gtin13", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Product", - "schema:Demand" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The GTIN-13 code of the product, or the product to which the offer refers. This is equivalent to 13-digit ISBN codes and EAN UCC-13. 
Former 12-digit UPC codes can be converted into a GTIN-13 code by simply adding a preceeding zero.", - "rdfs:label": "gtin13" - }, - { - "@id": "schema:gtin14", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Product", - "schema:Demand" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The GTIN-14 code of the product, or the product to which the offer refers.", - "rdfs:label": "gtin14" - }, - { - "@id": "schema:gtin8", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Product", - "schema:Demand" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The GTIN-8 code of the product, or the product to which the offer refers. This code is also known as EAN/UCC-8 or 8-digit EAN.", - "rdfs:label": "gtin8" - }, - { - "@id": "schema:includesObject", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": "schema:TypeAndQuantityNode", - "rdfs:comment": "This links to a node or nodes indicating the exact quantity of the products included in the offer.", - "rdfs:label": "includesObject" - }, - { - "@id": "schema:inventoryLevel", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand", - "schema:SomeProducts" - ], - "rangeIncludes": "schema:QuantitativeValue", - "rdfs:comment": "The current approximate inventory level for the item or items.", - "rdfs:label": "inventoryLevel" - }, - { - "@id": "schema:itemCondition", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Product", - "schema:Demand" - ], - "rangeIncludes": "schema:OfferItemCondition", - "rdfs:comment": "A predefined value from OfferItemCondition or a textual description of the condition of the product or service, or the products or services included in the offer.", - "rdfs:label": "itemCondition" - }, - { - "@id": "schema:itemOffered", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": 
"schema:Product", - "rdfs:comment": "The item being offered.", - "rdfs:label": "itemOffered" - }, - { - "@id": "schema:mpn", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Product", - "schema:Demand" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The Manufacturer Part Number (MPN) of the product, or the product to which the offer refers.", - "rdfs:label": "mpn" - }, - { - "@id": "schema:priceSpecification", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": "schema:PriceSpecification", - "rdfs:comment": "One or more detailed price specifications, indicating the unit price and delivery or payment charges.", - "rdfs:label": "priceSpecification" - }, - { - "@id": "schema:seller", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "The organization or person making the offer.", - "rdfs:label": "seller" - }, - { - "@id": "schema:serialNumber", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand", - "schema:IndividualProduct" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The serial number or any alphanumeric identifier of a particular product. When attached to an offer, it is a shortcut for the serial number of the product included in the offer.", - "rdfs:label": "serialNumber" - }, - { - "@id": "schema:sku", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Product", - "schema:Demand" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The Stock Keeping Unit (SKU), i.e. 
a merchant-specific identifier for a product or service, or the product to which the offer refers.", - "rdfs:label": "sku" - }, - { - "@id": "schema:validFrom", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:PriceSpecification", - "schema:Demand", - "schema:OpeningHoursSpecification", - "schema:Permit" - ], - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The date when the item becomes valid.", - "rdfs:label": "validFrom" - }, - { - "@id": "schema:validThrough", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:PriceSpecification", - "schema:Demand", - "schema:OpeningHoursSpecification" - ], - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The end of the validity of offer, price specification, or opening hours data.", - "rdfs:label": "validThrough" - }, - { - "@id": "schema:warranty", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": "schema:WarrantyPromise", - "rdfs:comment": "The warranty promise(s) included in the offer.", - "rdfs:label": "warranty" - } - ] - }, - { - "@id": "schema:EntryPoint", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_ActionCollabClass" - }, - "rdfs:comment": "An entry point, within some Web-based protocol.", - "rdfs:label": "EntryPoint", - "rdfs:subClassOf": "schema:Intangible", - "properties": [ - { - "@id": "schema:application", - "@type": "rdf:Property", - "domainIncludes": "schema:EntryPoint", - "rangeIncludes": "schema:SoftwareApplication", - "rdfs:comment": "An application that can complete the request.", - "rdfs:label": "application" - }, - { - "@id": "schema:contentType", - "@type": "rdf:Property", - "domainIncludes": "schema:EntryPoint", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The supported content type(s) for an EntryPoint response.", - "rdfs:label": "contentType" - }, - { - "@id": 
"schema:encodingType", - "@type": "rdf:Property", - "domainIncludes": "schema:EntryPoint", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The supported encoding type(s) for an EntryPoint request.", - "rdfs:label": "encodingType" - }, - { - "@id": "schema:httpMethod", - "@type": "rdf:Property", - "domainIncludes": "schema:EntryPoint", - "rangeIncludes": "schema:Text", - "rdfs:comment": "An HTTP method that specifies the appropriate HTTP method for a request to an HTTP EntryPoint. Values are capitalized strings as used in HTTP.", - "rdfs:label": "httpMethod" - }, - { - "@id": "schema:urlTemplate", - "@type": "rdf:Property", - "domainIncludes": "schema:EntryPoint", - "rangeIncludes": "schema:Text", - "rdfs:comment": "An url template (RFC6570) that will be used to construct the target of the execution of the action.", - "rdfs:label": "urlTemplate" - } - ] - }, - { - "@id": "schema:Enumeration", - "@type": "rdfs:Class", - "rdfs:comment": "Lists or enumerations—for example, a list of cuisines or music genres, etc.", - "rdfs:label": "Enumeration", - "rdfs:subClassOf": "schema:Intangible", - "children": [ - { - "@id": "schema:ActionStatusType", - "@type": "rdfs:Class", - "rdfs:comment": "The status of an Action.", - "rdfs:label": "ActionStatusType", - "rdfs:subClassOf": "schema:Enumeration" - }, - { - "@id": "schema:BookFormatType", - "@type": "rdfs:Class", - "rdfs:comment": "The publication format of the book.", - "rdfs:label": "BookFormatType", - "rdfs:subClassOf": "schema:Enumeration" - }, - { - "@id": "schema:BusinessEntityType", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_GoodRelationsClass" - }, - "rdfs:comment": "A business entity type is a conceptual entity representing the legal form, the size, the main line of business, the position in the value chain, or any combination thereof, of an organization or business person.\n\n Commonly used values:\n\n 
http://purl.org/goodrelations/v1#Business\n http://purl.org/goodrelations/v1#Enduser\n http://purl.org/goodrelations/v1#PublicInstitution\n http://purl.org/goodrelations/v1#Reseller\n\n \t\t", - "rdfs:label": "BusinessEntityType", - "rdfs:subClassOf": "schema:Enumeration" - }, - { - "@id": "schema:BusinessFunction", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_GoodRelationsClass" - }, - "rdfs:comment": "The business function specifies the type of activity or access (i.e., the bundle of rights) offered by the organization or business person through the offer. Typical are sell, rental or lease, maintenance or repair, manufacture / produce, recycle / dispose, engineering / construction, or installation. Proprietary specifications of access rights are also instances of this class.\n\n Commonly used values:\n\n http://purl.org/goodrelations/v1#ConstructionInstallation\n http://purl.org/goodrelations/v1#Dispose\n http://purl.org/goodrelations/v1#LeaseOut\n http://purl.org/goodrelations/v1#Maintain\n http://purl.org/goodrelations/v1#ProvideService\n http://purl.org/goodrelations/v1#Repair\n http://purl.org/goodrelations/v1#Sell\n http://purl.org/goodrelations/v1#Buy\n \t\t", - "rdfs:label": "BusinessFunction", - "rdfs:subClassOf": "schema:Enumeration" - }, - { - "@id": "schema:ContactPointOption", - "@type": "rdfs:Class", - "rdfs:comment": "Enumerated options related to a ContactPoint", - "rdfs:label": "ContactPointOption", - "rdfs:subClassOf": "schema:Enumeration" - }, - { - "@id": "schema:DayOfWeek", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_GoodRelationsClass" - }, - "rdfs:comment": "The day of the week, e.g. 
used to specify to which day the opening hours of an OpeningHoursSpecification refer.\n\n Commonly used values:\n\n http://purl.org/goodrelations/v1#Monday\n http://purl.org/goodrelations/v1#Tuesday\n http://purl.org/goodrelations/v1#Wednesday\n http://purl.org/goodrelations/v1#Thursday\n http://purl.org/goodrelations/v1#Friday\n http://purl.org/goodrelations/v1#Saturday\n http://purl.org/goodrelations/v1#Sunday\n http://purl.org/goodrelations/v1#PublicHolidays\n \t\t", - "rdfs:label": "DayOfWeek", - "rdfs:subClassOf": "schema:Enumeration" - }, - { - "@id": "schema:DeliveryMethod", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_GoodRelationsClass" - }, - "rdfs:comment": "A delivery method is a standardized procedure for transferring the product or service to the destination of fulfillment chosen by the customer. Delivery methods are characterized by the means of transportation used, and by the organization or group that is the contracting party for the sending organization or person.\n\n Commonly used values:\n\n http://purl.org/goodrelations/v1#DeliveryModeDirectDownload\n http://purl.org/goodrelations/v1#DeliveryModeFreight\n http://purl.org/goodrelations/v1#DeliveryModeMail\n http://purl.org/goodrelations/v1#DeliveryModeOwnFleet\n http://purl.org/goodrelations/v1#DeliveryModePickUp\n http://purl.org/goodrelations/v1#DHL\n http://purl.org/goodrelations/v1#FederalExpress\n http://purl.org/goodrelations/v1#UPS\n \t\t", - "rdfs:label": "DeliveryMethod", - "rdfs:subClassOf": "schema:Enumeration", - "children": [ - { - "@id": "schema:LockerDelivery", - "@type": "rdfs:Class", - "rdfs:comment": "A DeliveryMethod in which an item is made available via locker.", - "rdfs:label": "LockerDelivery", - "rdfs:subClassOf": "schema:DeliveryMethod" - }, - { - "@id": "schema:ParcelService", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": 
"http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_GoodRelationsClass" - }, - "rdfs:comment": "A private parcel service as the delivery mode available for a certain offer.\n\n Commonly used values:\n\n http://purl.org/goodrelations/v1#DHL\n http://purl.org/goodrelations/v1#FederalExpress\n http://purl.org/goodrelations/v1#UPS\n \t", - "rdfs:label": "ParcelService", - "rdfs:subClassOf": "schema:DeliveryMethod" - } - ] - }, - { - "@id": "schema:DrugCostCategory", - "@type": "rdfs:Class", - "rdfs:comment": "Enumerated categories of medical drug costs.", - "rdfs:label": "DrugCostCategory", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:DrugPregnancyCategory", - "@type": "rdfs:Class", - "rdfs:comment": "Categories that represent an assessment of the risk of fetal injury due to a drug or pharmaceutical used as directed by the mother during pregnancy.", - "rdfs:label": "DrugPregnancyCategory", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:DrugPrescriptionStatus", - "@type": "rdfs:Class", - "rdfs:comment": "Indicates whether this drug is available by prescription or over-the-counter.", - "rdfs:label": "DrugPrescriptionStatus", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:EventStatusType", - "@type": "rdfs:Class", - "rdfs:comment": "EventStatusType is an enumeration type whose instances represent several states that an Event may be in.", - "rdfs:label": "EventStatusType", - "rdfs:subClassOf": "schema:Enumeration" - }, - { - "@id": "schema:InfectiousAgentClass", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Classes of agents or pathogens that transmit infectious diseases. 
Enumerated type.", - "rdfs:label": "InfectiousAgentClass", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:ItemAvailability", - "@type": "rdfs:Class", - "rdfs:comment": "A list of possible product availability options.", - "rdfs:label": "ItemAvailability", - "rdfs:subClassOf": "schema:Enumeration" - }, - { - "@id": "schema:MedicalAudience", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Target audiences for medical web pages. Enumerated type.", - "rdfs:label": "MedicalAudience", - "rdfs:subClassOf": [ - "schema:Audience", - "schema:MedicalEnumeration", - "schema:Enumeration", - "schema:PeopleAudience" - ] - }, - { - "@id": "schema:MedicalDevicePurpose", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Categories of medical devices, organized by the purpose or intended use of the device.", - "rdfs:label": "MedicalDevicePurpose", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:MedicalEnumeration", - "@type": "rdfs:Class", - "rdfs:comment": "Enumerations related to health and the practice of medicine.", - "rdfs:label": "MedicalEnumeration", - "rdfs:subClassOf": [ - "schema:MedicalIntangible", - "schema:Enumeration" - ], - "children": [ - { - "@id": "schema:DrugCostCategory", - "@type": "rdfs:Class", - "rdfs:comment": "Enumerated categories of medical drug costs.", - "rdfs:label": "DrugCostCategory", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:DrugPregnancyCategory", - "@type": "rdfs:Class", - "rdfs:comment": "Categories that represent an assessment of the risk of fetal injury due to a drug or pharmaceutical used as directed by the mother during 
pregnancy.", - "rdfs:label": "DrugPregnancyCategory", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:DrugPrescriptionStatus", - "@type": "rdfs:Class", - "rdfs:comment": "Indicates whether this drug is available by prescription or over-the-counter.", - "rdfs:label": "DrugPrescriptionStatus", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:InfectiousAgentClass", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Classes of agents or pathogens that transmit infectious diseases. Enumerated type.", - "rdfs:label": "InfectiousAgentClass", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:MedicalAudience", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Target audiences for medical web pages. Enumerated type.", - "rdfs:label": "MedicalAudience", - "rdfs:subClassOf": [ - "schema:Audience", - "schema:MedicalEnumeration", - "schema:Enumeration", - "schema:PeopleAudience" - ] - }, - { - "@id": "schema:MedicalDevicePurpose", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Categories of medical devices, organized by the purpose or intended use of the device.", - "rdfs:label": "MedicalDevicePurpose", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:MedicalEvidenceLevel", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Level of evidence for a medical guideline. 
Enumerated type.", - "rdfs:label": "MedicalEvidenceLevel", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:MedicalImagingTechnique", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Any medical imaging modality typically used for diagnostic purposes. Enumerated type.", - "rdfs:label": "MedicalImagingTechnique", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:MedicalObservationalStudyDesign", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Design models for observational medical studies. Enumerated type.", - "rdfs:label": "MedicalObservationalStudyDesign", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:MedicalProcedureType", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "An enumeration that describes different types of medical procedures.", - "rdfs:label": "MedicalProcedureType", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:MedicalSpecialty", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Any specific branch of medical science or practice. Medical specialities include clinical specialties that pertain to particular organ systems and their respective disease states, as well as allied health specialties. 
Enumerated type.", - "rdfs:label": "MedicalSpecialty", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration", - "schema:Specialty" - ] - }, - { - "@id": "schema:MedicalStudyStatus", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "The status of a medical study. Enumerated type.", - "rdfs:label": "MedicalStudyStatus", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:MedicalTrialDesign", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Design models for medical trials. Enumerated type.", - "rdfs:label": "MedicalTrialDesign", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:MedicineSystem", - "@type": "rdfs:Class", - "rdfs:comment": "Systems of medical practice.", - "rdfs:label": "MedicineSystem", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:PhysicalActivityCategory", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Categories of physical activity, organized by physiologic classification.", - "rdfs:label": "PhysicalActivityCategory", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:PhysicalExam", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A type of physical examination of a patient performed by a physician. 
Enumerated type.", - "rdfs:label": "PhysicalExam", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - } - ] - }, - { - "@id": "schema:MedicalEvidenceLevel", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Level of evidence for a medical guideline. Enumerated type.", - "rdfs:label": "MedicalEvidenceLevel", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:MedicalImagingTechnique", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Any medical imaging modality typically used for diagnostic purposes. Enumerated type.", - "rdfs:label": "MedicalImagingTechnique", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:MedicalObservationalStudyDesign", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Design models for observational medical studies. 
Enumerated type.", - "rdfs:label": "MedicalObservationalStudyDesign", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:MedicalProcedureType", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "An enumeration that describes different types of medical procedures.", - "rdfs:label": "MedicalProcedureType", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:MedicalSpecialty", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Any specific branch of medical science or practice. Medical specialities include clinical specialties that pertain to particular organ systems and their respective disease states, as well as allied health specialties. Enumerated type.", - "rdfs:label": "MedicalSpecialty", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration", - "schema:Specialty" - ] - }, - { - "@id": "schema:MedicalStudyStatus", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "The status of a medical study. Enumerated type.", - "rdfs:label": "MedicalStudyStatus", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:MedicalTrialDesign", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Design models for medical trials. 
Enumerated type.", - "rdfs:label": "MedicalTrialDesign", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:MedicineSystem", - "@type": "rdfs:Class", - "rdfs:comment": "Systems of medical practice.", - "rdfs:label": "MedicineSystem", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:OfferItemCondition", - "@type": "rdfs:Class", - "rdfs:comment": "A list of possible conditions for the item.", - "rdfs:label": "OfferItemCondition", - "rdfs:subClassOf": "schema:Enumeration" - }, - { - "@id": "schema:OrderStatus", - "@type": "rdfs:Class", - "rdfs:comment": "Enumerated status values for Order.", - "rdfs:label": "OrderStatus", - "rdfs:subClassOf": "schema:Enumeration" - }, - { - "@id": "schema:PaymentMethod", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_GoodRelationsClass" - }, - "rdfs:comment": "A payment method is a standardized procedure for transferring the monetary amount for a purchase. 
Payment methods are characterized by the legal and technical structures used, and by the organization or group carrying out the transaction.\n\n Commonly used values:\n\n http://purl.org/goodrelations/v1#ByBankTransferInAdvance\n http://purl.org/goodrelations/v1#ByInvoice\n http://purl.org/goodrelations/v1#Cash\n http://purl.org/goodrelations/v1#CheckInAdvance\n http://purl.org/goodrelations/v1#COD\n http://purl.org/goodrelations/v1#DirectDebit\n http://purl.org/goodrelations/v1#GoogleCheckout\n http://purl.org/goodrelations/v1#PayPal\n http://purl.org/goodrelations/v1#PaySwarm\n \t\t", - "rdfs:label": "PaymentMethod", - "rdfs:subClassOf": "schema:Enumeration", - "children": [ - { - "@id": "schema:CreditCard", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_GoodRelationsClass" - }, - "rdfs:comment": "A credit or debit card type as a standardized procedure for transferring the monetary amount for a purchase.\n\n Commonly used values:\n\n http://purl.org/goodrelations/v1#AmericanExpress\n http://purl.org/goodrelations/v1#DinersClub\n http://purl.org/goodrelations/v1#Discover\n http://purl.org/goodrelations/v1#JCB\n http://purl.org/goodrelations/v1#MasterCard\n http://purl.org/goodrelations/v1#VISA\n \t\t", - "rdfs:label": "CreditCard", - "rdfs:subClassOf": "schema:PaymentMethod" - } - ] - }, - { - "@id": "schema:PhysicalActivityCategory", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Categories of physical activity, organized by physiologic classification.", - "rdfs:label": "PhysicalActivityCategory", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:PhysicalExam", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" 
- }, - "rdfs:comment": "A type of physical examination of a patient performed by a physician. Enumerated type.", - "rdfs:label": "PhysicalExam", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:QualitativeValue", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_GoodRelationsClass" - }, - "rdfs:comment": "A predefined value for a product characteristic, e.g. the the power cord plug type \"US\" or the garment sizes \"S\", \"M\", \"L\", and \"XL\"", - "rdfs:label": "QualitativeValue", - "rdfs:subClassOf": "schema:Enumeration", - "properties": [ - { - "@id": "schema:equal", - "@type": "rdf:Property", - "domainIncludes": "schema:QualitativeValue", - "rangeIncludes": "schema:QualitativeValue", - "rdfs:comment": "This ordering relation for qualitative values indicates that the subject is equal to the object.", - "rdfs:label": "equal" - }, - { - "@id": "schema:greater", - "@type": "rdf:Property", - "domainIncludes": "schema:QualitativeValue", - "rangeIncludes": "schema:QualitativeValue", - "rdfs:comment": "This ordering relation for qualitative values indicates that the subject is greater than the object.", - "rdfs:label": "greater" - }, - { - "@id": "schema:greaterOrEqual", - "@type": "rdf:Property", - "domainIncludes": "schema:QualitativeValue", - "rangeIncludes": "schema:QualitativeValue", - "rdfs:comment": "This ordering relation for qualitative values indicates that the subject is greater than or equal to the object.", - "rdfs:label": "greaterOrEqual" - }, - { - "@id": "schema:lesser", - "@type": "rdf:Property", - "domainIncludes": "schema:QualitativeValue", - "rangeIncludes": "schema:QualitativeValue", - "rdfs:comment": "This ordering relation for qualitative values indicates that the subject is lesser than the object.", - "rdfs:label": "lesser" - }, - { - "@id": "schema:lesserOrEqual", - "@type": "rdf:Property", - 
"domainIncludes": "schema:QualitativeValue", - "rangeIncludes": "schema:QualitativeValue", - "rdfs:comment": "This ordering relation for qualitative values indicates that the subject is lesser than or equal to the object.", - "rdfs:label": "lesserOrEqual" - }, - { - "@id": "schema:nonEqual", - "@type": "rdf:Property", - "domainIncludes": "schema:QualitativeValue", - "rangeIncludes": "schema:QualitativeValue", - "rdfs:comment": "This ordering relation for qualitative values indicates that the subject is not equal to the object.", - "rdfs:label": "nonEqual" - }, - { - "@id": "schema:valueReference", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:QualitativeValue", - "schema:QuantitativeValue" - ], - "rangeIncludes": [ - "schema:Enumeration", - "schema:StructuredValue" - ], - "rdfs:comment": "A pointer to a secondary value that provides additional information on the original value, e.g. a reference temperature.", - "rdfs:label": "valueReference" - } - ] - }, - { - "@id": "schema:ReservationStatusType", - "@type": "rdfs:Class", - "rdfs:comment": "Enumerated status values for Reservation.", - "rdfs:label": "ReservationStatusType", - "rdfs:subClassOf": "schema:Enumeration" - }, - { - "@id": "schema:Specialty", - "@type": "rdfs:Class", - "rdfs:comment": "Any branch of a field in which people typically develop specific expertise, usually after significant study, time, and effort.", - "rdfs:label": "Specialty", - "rdfs:subClassOf": "schema:Enumeration", - "children": [ - { - "@id": "schema:MedicalSpecialty", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Any specific branch of medical science or practice. Medical specialities include clinical specialties that pertain to particular organ systems and their respective disease states, as well as allied health specialties. 
Enumerated type.", - "rdfs:label": "MedicalSpecialty", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration", - "schema:Specialty" - ] - } - ] - }, - { - "@id": "schema:WarrantyScope", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_GoodRelationsClass" - }, - "rdfs:comment": "A range of of services that will be provided to a customer free of charge in case of a defect or malfunction of a product.\n\n Commonly used values:\n\n http://purl.org/goodrelations/v1#Labor-BringIn\n http://purl.org/goodrelations/v1#PartsAndLabor-BringIn\n http://purl.org/goodrelations/v1#PartsAndLabor-PickUp\n \t", - "rdfs:label": "WarrantyScope", - "rdfs:subClassOf": "schema:Enumeration" - } - ] - }, - { - "@id": "schema:Flight", - "@type": "rdfs:Class", - "rdfs:comment": "An airline flight.", - "rdfs:label": "Flight", - "rdfs:subClassOf": "schema:Intangible", - "properties": [ - { - "@id": "schema:aircraft", - "@type": "rdf:Property", - "domainIncludes": "schema:Flight", - "rangeIncludes": [ - "schema:Text", - "schema:Vehicle" - ], - "rdfs:comment": "The kind of aircraft (e.g., \"Boeing 747\").", - "rdfs:label": "aircraft" - }, - { - "@id": "schema:arrivalAirport", - "@type": "rdf:Property", - "domainIncludes": "schema:Flight", - "rangeIncludes": "schema:Airport", - "rdfs:comment": "The airport where the flight terminates.", - "rdfs:label": "departureAirport" - }, - { - "@id": "schema:arrivalGate", - "@type": "rdf:Property", - "domainIncludes": "schema:Flight", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Identifier of the flight's arrival gate.", - "rdfs:label": "arrivalGate" - }, - { - "@id": "schema:arrivalTerminal", - "@type": "rdf:Property", - "domainIncludes": "schema:Flight", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Identifier of the flight's arrival terminal.", - "rdfs:label": "arrivalTerminal" - }, - { - "@id": "schema:arrivalTime", - "@type": 
"rdf:Property", - "domainIncludes": [ - "schema:BusTrip", - "schema:Flight", - "schema:TrainTrip" - ], - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The expected arrival time.", - "rdfs:label": "arrivalTime" - }, - { - "@id": "schema:carrier", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:ParcelDelivery", - "schema:Flight" - ], - "rangeIncludes": "schema:Organization", - "rdfs:comment": "The party responsible for the parcel delivery.", - "rdfs:label": "carrier" - }, - { - "@id": "schema:departureAirport", - "@type": "rdf:Property", - "domainIncludes": "schema:Flight", - "rangeIncludes": "schema:Airport", - "rdfs:comment": "The airport where the flight originates.", - "rdfs:label": "departureAirport" - }, - { - "@id": "schema:departureGate", - "@type": "rdf:Property", - "domainIncludes": "schema:Flight", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Identifier of the flight's departure gate.", - "rdfs:label": "departureGate" - }, - { - "@id": "schema:departureTerminal", - "@type": "rdf:Property", - "domainIncludes": "schema:Flight", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Identifier of the flight's departure terminal.", - "rdfs:label": "departureTerminal" - }, - { - "@id": "schema:departureTime", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:BusTrip", - "schema:Flight", - "schema:TrainTrip" - ], - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The expected departure time.", - "rdfs:label": "departureTime" - }, - { - "@id": "schema:estimatedFlightDuration", - "@type": "rdf:Property", - "domainIncludes": "schema:Flight", - "rangeIncludes": [ - "schema:Text", - "schema:Duration" - ], - "rdfs:comment": "The estimated time the flight will take.", - "rdfs:label": "estimatedFlightDuration" - }, - { - "@id": "schema:flightDistance", - "@type": "rdf:Property", - "domainIncludes": "schema:Flight", - "rangeIncludes": [ - "schema:Text", - "schema:Distance" - ], - "rdfs:comment": "The distance of the flight.", - 
"rdfs:label": "flightDistance" - }, - { - "@id": "schema:flightNumber", - "@type": "rdf:Property", - "domainIncludes": "schema:Flight", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The unique identifier for a flight, not including the airline IATA code. For example, if describing United flight 110, the flightNumber is '110'. The IATA code can be set on the Airline.", - "rdfs:label": "flightNumber" - }, - { - "@id": "schema:mealService", - "@type": "rdf:Property", - "domainIncludes": "schema:Flight", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Description of the meals that will be provided or available for purchase.", - "rdfs:label": "mealService" - }, - { - "@id": "schema:provider", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Service", - "schema:Reservation", - "schema:Flight", - "schema:TrainTrip", - "schema:BusTrip" - ], - "rangeIncludes": [ - "schema:Person", - "schema:Organization" - ], - "rdfs:comment": "The person or organization providing the service, reservation, or creative work. 
The provider may subcontract out the service.", - "rdfs:label": "provider" - }, - { - "@id": "schema:webCheckinTime", - "@type": "rdf:Property", - "domainIncludes": "schema:Flight", - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The time when a passenger can check into the flight online.", - "rdfs:label": "webCheckinTime" - } - ] - }, - { - "@id": "schema:JobPosting", - "@type": "rdfs:Class", - "rdfs:comment": "A listing that describes a job opening in a certain organization.", - "rdfs:label": "JobPosting", - "rdfs:subClassOf": "schema:Intangible", - "properties": [ - { - "@id": "schema:baseSalary", - "@type": "rdf:Property", - "domainIncludes": "schema:JobPosting", - "rangeIncludes": "schema:Number", - "rdfs:comment": "The base salary of the job.", - "rdfs:label": "baseSalary" - }, - { - "@id": "schema:benefits", - "@type": "rdf:Property", - "domainIncludes": "schema:JobPosting", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Description of benefits associated with the job.", - "rdfs:label": "benefits" - }, - { - "@id": "schema:datePosted", - "@type": "rdf:Property", - "domainIncludes": "schema:JobPosting", - "rangeIncludes": "schema:Date", - "rdfs:comment": "Publication date for the job posting.", - "rdfs:label": "datePosted" - }, - { - "@id": "schema:educationRequirements", - "@type": "rdf:Property", - "domainIncludes": "schema:JobPosting", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Educational background needed for the position.", - "rdfs:label": "educationRequirements" - }, - { - "@id": "schema:employmentType", - "@type": "rdf:Property", - "domainIncludes": "schema:JobPosting", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Type of employment (e.g. 
full-time, part-time, contract, temporary, seasonal, internship).", - "rdfs:label": "employmentType" - }, - { - "@id": "schema:experienceRequirements", - "@type": "rdf:Property", - "domainIncludes": "schema:JobPosting", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Description of skills and experience needed for the position.", - "rdfs:label": "experienceRequirements" - }, - { - "@id": "schema:hiringOrganization", - "@type": "rdf:Property", - "domainIncludes": "schema:JobPosting", - "rangeIncludes": "schema:Organization", - "rdfs:comment": "Organization offering the job position.", - "rdfs:label": "hiringOrganization" - }, - { - "@id": "schema:incentives", - "@type": "rdf:Property", - "domainIncludes": "schema:JobPosting", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Description of bonus and commission compensation aspects of the job.", - "rdfs:label": "incentives" - }, - { - "@id": "schema:industry", - "@type": "rdf:Property", - "domainIncludes": "schema:JobPosting", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The industry associated with the job position.", - "rdfs:label": "industry" - }, - { - "@id": "schema:jobLocation", - "@type": "rdf:Property", - "domainIncludes": "schema:JobPosting", - "rangeIncludes": "schema:Place", - "rdfs:comment": "A (typically single) geographic location associated with the job position.", - "rdfs:label": "jobLocation" - }, - { - "@id": "schema:occupationalCategory", - "@type": "rdf:Property", - "domainIncludes": "schema:JobPosting", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Category or categories describing the job. Use BLS O*NET-SOC taxonomy: http://www.onetcenter.org/taxonomy.html. 
Ideally includes textual label and formal code, with the property repeated for each applicable value.", - "rdfs:label": "occupationalCategory" - }, - { - "@id": "schema:qualifications", - "@type": "rdf:Property", - "domainIncludes": "schema:JobPosting", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Specific qualifications required for this role.", - "rdfs:label": "qualifications" - }, - { - "@id": "schema:responsibilities", - "@type": "rdf:Property", - "domainIncludes": "schema:JobPosting", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Responsibilities associated with this role.", - "rdfs:label": "responsibilities" - }, - { - "@id": "schema:salaryCurrency", - "@type": "rdf:Property", - "domainIncludes": "schema:JobPosting", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The currency (coded using ISO 4217, http://en.wikipedia.org/wiki/ISO_4217 used for the main salary information in this job posting.", - "rdfs:label": "salaryCurrency" - }, - { - "@id": "schema:skills", - "@type": "rdf:Property", - "domainIncludes": "schema:JobPosting", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Skills required to fulfill this role.", - "rdfs:label": "skills" - }, - { - "@id": "schema:specialCommitments", - "@type": "rdf:Property", - "domainIncludes": "schema:JobPosting", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Any special commitments associated with this job posting. Valid entries include VeteranCommit, MilitarySpouseCommit, etc.", - "rdfs:label": "specialCommitments" - }, - { - "@id": "schema:title", - "@type": "rdf:Property", - "domainIncludes": "schema:JobPosting", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The title of the job.", - "rdfs:label": "title" - }, - { - "@id": "schema:workHours", - "@type": "rdf:Property", - "domainIncludes": "schema:JobPosting", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The typical working hours for this job (e.g. 
1st shift, night shift, 8am-5pm).", - "rdfs:label": "workHours" - } - ] - }, - { - "@id": "schema:Language", - "@type": "rdfs:Class", - "rdfs:comment": "Natural languages such as Spanish, Tamil, Hindi, English, etc. and programming languages such as Scheme and Lisp.", - "rdfs:label": "Language", - "rdfs:subClassOf": "schema:Intangible" - }, - { - "@id": "schema:Offer", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_GoodRelationsProperties" - }, - "rdfs:comment": "An offer to transfer some rights to an item or to provide a service—for example, an offer to sell tickets to an event, to rent the DVD of a movie, to stream a TV show over the internet, to repair a motorcycle, or to loan a book.", - "rdfs:label": "Offer", - "rdfs:subClassOf": "schema:Intangible", - "children": [ - { - "@id": "schema:AggregateOffer", - "@type": "rdfs:Class", - "rdfs:comment": "When a single product is associated with multiple offers (for example, the same pair of shoes is offered by different merchants), then AggregateOffer can be used.", - "rdfs:label": "AggregateOffer", - "rdfs:subClassOf": "schema:Offer", - "properties": [ - { - "@id": "schema:highPrice", - "@type": "rdf:Property", - "domainIncludes": "schema:AggregateOffer", - "rangeIncludes": [ - "schema:Number", - "schema:Text" - ], - "rdfs:comment": "The highest price of all offers available.", - "rdfs:label": "highPrice" - }, - { - "@id": "schema:lowPrice", - "@type": "rdf:Property", - "domainIncludes": "schema:AggregateOffer", - "rangeIncludes": [ - "schema:Number", - "schema:Text" - ], - "rdfs:comment": "The lowest price of all offers available.", - "rdfs:label": "lowPrice" - }, - { - "@id": "schema:offerCount", - "@type": "rdf:Property", - "domainIncludes": "schema:AggregateOffer", - "rangeIncludes": "schema:Integer", - "rdfs:comment": "The number of offers for the product.", - "rdfs:label": "offerCount" - } - ] - } - ], - "properties": [ - { 
- "@id": "schema:acceptedPaymentMethod", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": "schema:PaymentMethod", - "rdfs:comment": "The payment method(s) accepted by seller for this offer.", - "rdfs:label": "acceptedPaymentMethod" - }, - { - "@id": "schema:addOn", - "@type": "rdf:Property", - "domainIncludes": "schema:Offer", - "rangeIncludes": "schema:Offer", - "rdfs:comment": "An additional offer that can only be obtained in combination with the first base offer (e.g. supplements and extensions that are available for a surcharge).", - "rdfs:label": "addOn" - }, - { - "@id": "schema:advanceBookingRequirement", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": "schema:QuantitativeValue", - "rdfs:comment": "The amount of time that is required between accepting the offer and the actual usage of the resource or service.", - "rdfs:label": "advanceBookingRequirement" - }, - { - "@id": "schema:aggregateRating", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Organization", - "schema:Place", - "schema:Offer", - "schema:Product" - ], - "rangeIncludes": "schema:AggregateRating", - "rdfs:comment": "The overall rating, based on a collection of reviews or ratings, of the item.", - "rdfs:label": "aggregateRating" - }, - { - "@id": "schema:availability", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": "schema:ItemAvailability", - "rdfs:comment": "The availability of this item—for example In stock, Out of stock, Pre-order, etc.", - "rdfs:label": "availability" - }, - { - "@id": "schema:availabilityEnds", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The end of the availability of the product or service included in the offer.", - "rdfs:label": "availabilityEnds" - }, - 
{ - "@id": "schema:availabilityStarts", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The beginning of the availability of the product or service included in the offer.", - "rdfs:label": "availabilityStarts" - }, - { - "@id": "schema:availableAtOrFrom", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": "schema:Place", - "rdfs:comment": "The place(s) from which the offer can be obtained (e.g. store locations).", - "rdfs:label": "availableAtOrFrom" - }, - { - "@id": "schema:availableDeliveryMethod", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": "schema:DeliveryMethod", - "rdfs:comment": "The delivery method(s) available for this offer.", - "rdfs:label": "availableDeliveryMethod" - }, - { - "@id": "schema:businessFunction", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand", - "schema:TypeAndQuantityNode" - ], - "rangeIncludes": "schema:BusinessFunction", - "rdfs:comment": "The business function (e.g. sell, lease, repair, dispose) of the offer or component of a bundle (TypeAndQuantityNode). The default is http://purl.org/goodrelations/v1#Sell.", - "rdfs:label": "businessFunction" - }, - { - "@id": "schema:category", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:PhysicalActivity" - ], - "rangeIncludes": [ - "schema:PhysicalActivityCategory", - "schema:Text", - "schema:Thing" - ], - "rdfs:comment": "A category for the item. 
Greater signs or slashes can be used to informally indicate a category hierarchy.", - "rdfs:label": "category" - }, - { - "@id": "schema:deliveryLeadTime", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": "schema:QuantitativeValue", - "rdfs:comment": "The typical delay between the receipt of the order and the goods leaving the warehouse.", - "rdfs:label": "deliveryLeadTime" - }, - { - "@id": "schema:eligibleCustomerType", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": "schema:BusinessEntityType", - "rdfs:comment": "The type(s) of customers for which the given offer is valid.", - "rdfs:label": "eligibleCustomerType" - }, - { - "@id": "schema:eligibleDuration", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": "schema:QuantitativeValue", - "rdfs:comment": "The duration for which the given offer is valid.", - "rdfs:label": "eligibleDuration" - }, - { - "@id": "schema:eligibleQuantity", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:PriceSpecification", - "schema:Demand" - ], - "rangeIncludes": "schema:QuantitativeValue", - "rdfs:comment": "The interval and unit of measurement of ordering quantities for which the offer or price specification is valid. This allows e.g. 
specifying that a certain freight charge is valid only for a certain quantity.", - "rdfs:label": "eligibleQuantity" - }, - { - "@id": "schema:eligibleRegion", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:DeliveryChargeSpecification", - "schema:Demand" - ], - "rangeIncludes": [ - "schema:GeoShape", - "schema:Text" - ], - "rdfs:comment": "The ISO 3166-1 (ISO 3166-1 alpha-2) or ISO 3166-2 code, or the GeoShape for the geo-political region(s) for which the offer or delivery charge specification is valid.", - "rdfs:label": "eligibleRegion" - }, - { - "@id": "schema:eligibleTransactionVolume", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:PriceSpecification", - "schema:Demand" - ], - "rangeIncludes": "schema:PriceSpecification", - "rdfs:comment": "The transaction volume, in a monetary unit, for which the offer or price specification is valid, e.g. for indicating a minimal purchasing volume, to express free shipping above a certain order volume, or to limit the acceptance of credit cards to purchases to a certain minimal amount.", - "rdfs:label": "eligibleTransactionVolume" - }, - { - "@id": "schema:gtin13", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Product", - "schema:Demand" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The GTIN-13 code of the product, or the product to which the offer refers. This is equivalent to 13-digit ISBN codes and EAN UCC-13. 
Former 12-digit UPC codes can be converted into a GTIN-13 code by simply adding a preceeding zero.", - "rdfs:label": "gtin13" - }, - { - "@id": "schema:gtin14", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Product", - "schema:Demand" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The GTIN-14 code of the product, or the product to which the offer refers.", - "rdfs:label": "gtin14" - }, - { - "@id": "schema:gtin8", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Product", - "schema:Demand" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The GTIN-8 code of the product, or the product to which the offer refers. This code is also known as EAN/UCC-8 or 8-digit EAN.", - "rdfs:label": "gtin8" - }, - { - "@id": "schema:includesObject", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": "schema:TypeAndQuantityNode", - "rdfs:comment": "This links to a node or nodes indicating the exact quantity of the products included in the offer.", - "rdfs:label": "includesObject" - }, - { - "@id": "schema:inventoryLevel", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand", - "schema:SomeProducts" - ], - "rangeIncludes": "schema:QuantitativeValue", - "rdfs:comment": "The current approximate inventory level for the item or items.", - "rdfs:label": "inventoryLevel" - }, - { - "@id": "schema:itemCondition", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Product", - "schema:Demand" - ], - "rangeIncludes": "schema:OfferItemCondition", - "rdfs:comment": "A predefined value from OfferItemCondition or a textual description of the condition of the product or service, or the products or services included in the offer.", - "rdfs:label": "itemCondition" - }, - { - "@id": "schema:itemOffered", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": 
"schema:Product", - "rdfs:comment": "The item being offered.", - "rdfs:label": "itemOffered" - }, - { - "@id": "schema:mpn", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Product", - "schema:Demand" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The Manufacturer Part Number (MPN) of the product, or the product to which the offer refers.", - "rdfs:label": "mpn" - }, - { - "@id": "schema:price", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:PriceSpecification", - "schema:TradeAction" - ], - "rangeIncludes": [ - "schema:Number", - "schema:Text" - ], - "rdfs:comment": "The offer price of a product, or of a price component when attached to PriceSpecification and its subtypes.", - "rdfs:label": "price" - }, - { - "@id": "schema:priceCurrency", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Reservation", - "schema:Ticket", - "schema:Offer", - "schema:PriceSpecification" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The currency (in 3-letter ISO 4217 format) of the price or a price component, when attached to PriceSpecification and its subtypes.", - "rdfs:label": "priceCurrency" - }, - { - "@id": "schema:priceSpecification", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": "schema:PriceSpecification", - "rdfs:comment": "One or more detailed price specifications, indicating the unit price and delivery or payment charges.", - "rdfs:label": "priceSpecification" - }, - { - "@id": "schema:priceValidUntil", - "@type": "rdf:Property", - "domainIncludes": "schema:Offer", - "rangeIncludes": "schema:Date", - "rdfs:comment": "The date after which the price is no longer available.", - "rdfs:label": "priceValidUntil" - }, - { - "@id": "schema:review", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Organization", - "schema:Place", - "schema:Offer", - "schema:Product" - ], - "rangeIncludes": 
"schema:Review", - "rdfs:comment": "A review of the item.", - "rdfs:label": "review" - }, - { - "@id": "schema:reviews", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Organization", - "schema:Place", - "schema:Offer", - "schema:Product" - ], - "rangeIncludes": "schema:Review", - "rdfs:comment": "Review of the item (legacy spelling; see singular form, review).", - "rdfs:label": "reviews", - "supercededBy": "schema:review" - }, - { - "@id": "schema:seller", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "The organization or person making the offer.", - "rdfs:label": "seller" - }, - { - "@id": "schema:serialNumber", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand", - "schema:IndividualProduct" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The serial number or any alphanumeric identifier of a particular product. When attached to an offer, it is a shortcut for the serial number of the product included in the offer.", - "rdfs:label": "serialNumber" - }, - { - "@id": "schema:sku", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Product", - "schema:Demand" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The Stock Keeping Unit (SKU), i.e. 
a merchant-specific identifier for a product or service, or the product to which the offer refers.", - "rdfs:label": "sku" - }, - { - "@id": "schema:validFrom", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:PriceSpecification", - "schema:Demand", - "schema:OpeningHoursSpecification", - "schema:Permit" - ], - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The date when the item becomes valid.", - "rdfs:label": "validFrom" - }, - { - "@id": "schema:validThrough", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:PriceSpecification", - "schema:Demand", - "schema:OpeningHoursSpecification" - ], - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The end of the validity of offer, price specification, or opening hours data.", - "rdfs:label": "validThrough" - }, - { - "@id": "schema:warranty", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand" - ], - "rangeIncludes": "schema:WarrantyPromise", - "rdfs:comment": "The warranty promise(s) included in the offer.", - "rdfs:label": "warranty" - } - ] - }, - { - "@id": "schema:Order", - "@type": "rdfs:Class", - "rdfs:comment": "An order is a confirmation of a transaction (a receipt), which can contain multiple line items, each represented by an Offer that has been accepted by the customer.", - "rdfs:label": "Order", - "rdfs:subClassOf": "schema:Intangible", - "properties": [ - { - "@id": "schema:acceptedOffer", - "@type": "rdf:Property", - "domainIncludes": "schema:Order", - "rangeIncludes": "schema:Offer", - "rdfs:comment": "The offer(s) -- e.g., product, quantity and price combinations -- included in the order.", - "rdfs:label": "acceptedOffer" - }, - { - "@id": "schema:billingAddress", - "@type": "rdf:Property", - "domainIncludes": "schema:Order", - "rangeIncludes": "schema:PostalAddress", - "rdfs:comment": "The billing address for the order.", - "rdfs:label": "billingAddress" - }, - { - "@id": "schema:confirmationNumber", 
- "@type": "rdf:Property", - "domainIncludes": "schema:Order", - "rangeIncludes": "schema:Text", - "rdfs:comment": "A number that confirms the given order.", - "rdfs:label": "confirmationNumber" - }, - { - "@id": "schema:customer", - "@type": "rdf:Property", - "domainIncludes": "schema:Order", - "rangeIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "Party placing the order.", - "rdfs:label": "customer" - }, - { - "@id": "schema:discount", - "@type": "rdf:Property", - "domainIncludes": "schema:Order", - "rangeIncludes": [ - "schema:Number", - "schema:Text" - ], - "rdfs:comment": "Any discount applied (to an Order).", - "rdfs:label": "discount" - }, - { - "@id": "schema:discountCode", - "@type": "rdf:Property", - "domainIncludes": "schema:Order", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Code used to redeem a discount.", - "rdfs:label": "discountCode" - }, - { - "@id": "schema:discountCurrency", - "@type": "rdf:Property", - "domainIncludes": "schema:Order", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The currency (in 3-letter ISO 4217 format) of the discount.", - "rdfs:label": "discountCurrency" - }, - { - "@id": "schema:isGift", - "@type": "rdf:Property", - "domainIncludes": "schema:Order", - "rangeIncludes": "schema:Boolean", - "rdfs:comment": "Was the offer accepted as a gift for someone other than the buyer.", - "rdfs:label": "isGift" - }, - { - "@id": "schema:merchant", - "@type": "rdf:Property", - "domainIncludes": "schema:Order", - "rangeIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "The party taking the order (e.g. 
Amazon.com is a merchant for many sellers).", - "rdfs:label": "merchant" - }, - { - "@id": "schema:orderDate", - "@type": "rdf:Property", - "domainIncludes": "schema:Order", - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "Date order was placed.", - "rdfs:label": "orderDate" - }, - { - "@id": "schema:orderNumber", - "@type": "rdf:Property", - "domainIncludes": "schema:Order", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The identifier of the transaction.", - "rdfs:label": "orderNumber" - }, - { - "@id": "schema:orderStatus", - "@type": "rdf:Property", - "domainIncludes": "schema:Order", - "rangeIncludes": "schema:OrderStatus", - "rdfs:comment": "The current status of the order.", - "rdfs:label": "orderStatus" - }, - { - "@id": "schema:orderedItem", - "@type": "rdf:Property", - "domainIncludes": "schema:Order", - "rangeIncludes": "schema:Product", - "rdfs:comment": "The item ordered.", - "rdfs:label": "orderedItem" - }, - { - "@id": "schema:paymentDue", - "@type": "rdf:Property", - "domainIncludes": "schema:Order", - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The date that payment is due.", - "rdfs:label": "paymentDue" - }, - { - "@id": "schema:paymentMethod", - "@type": "rdf:Property", - "domainIncludes": "schema:Order", - "rangeIncludes": "schema:PaymentMethod", - "rdfs:comment": "The name of the credit card or other method of payment for the order.", - "rdfs:label": "paymentMethod" - }, - { - "@id": "schema:paymentMethodId", - "@type": "rdf:Property", - "domainIncludes": "schema:Order", - "rangeIncludes": "schema:Text", - "rdfs:comment": "An identifier for the method of payment used (e.g. 
the last 4 digits of the credit card).", - "rdfs:label": "paymentMethodId" - }, - { - "@id": "schema:paymentUrl", - "@type": "rdf:Property", - "domainIncludes": "schema:Order", - "rangeIncludes": "schema:URL", - "rdfs:comment": "The URL for sending a payment.", - "rdfs:label": "paymentUrl" - } - ] - }, - { - "@id": "schema:ParcelDelivery", - "@type": "rdfs:Class", - "rdfs:comment": "The delivery of a parcel either via the postal service or a commercial service.", - "rdfs:label": "ParcelDelivery", - "rdfs:subClassOf": "schema:Intangible", - "properties": [ - { - "@id": "schema:carrier", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:ParcelDelivery", - "schema:Flight" - ], - "rangeIncludes": "schema:Organization", - "rdfs:comment": "The party responsible for the parcel delivery.", - "rdfs:label": "carrier" - }, - { - "@id": "schema:deliveryAddress", - "@type": "rdf:Property", - "domainIncludes": "schema:ParcelDelivery", - "rangeIncludes": "schema:PostalAddress", - "rdfs:comment": "Destination address.", - "rdfs:label": "deliveryAddress" - }, - { - "@id": "schema:deliveryStatus", - "@type": "rdf:Property", - "domainIncludes": "schema:ParcelDelivery", - "rangeIncludes": "schema:DeliveryEvent", - "rdfs:comment": "New entry added as the package passes through each leg of its journey (from shipment to final delivery).", - "rdfs:label": "deliveryStatus" - }, - { - "@id": "schema:expectedArrivalFrom", - "@type": "rdf:Property", - "domainIncludes": "schema:ParcelDelivery", - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The earliest date the package may arrive.", - "rdfs:label": "expectedArrivalFrom" - }, - { - "@id": "schema:expectedArrivalUntil", - "@type": "rdf:Property", - "domainIncludes": "schema:ParcelDelivery", - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The latest date the package may arrive.", - "rdfs:label": "expectedArrivalUntil" - }, - { - "@id": "schema:hasDeliveryMethod", - "@type": "rdf:Property", - "domainIncludes": [ - 
"schema:DeliveryEvent", - "schema:ParcelDelivery" - ], - "rangeIncludes": "schema:DeliveryMethod", - "rdfs:comment": "Method used for delivery or shipping.", - "rdfs:label": "hasDeliveryMethod" - }, - { - "@id": "schema:itemShipped", - "@type": "rdf:Property", - "domainIncludes": "schema:ParcelDelivery", - "rangeIncludes": "schema:Product", - "rdfs:comment": "Item(s) being shipped.", - "rdfs:label": "itemShipped" - }, - { - "@id": "schema:originAddress", - "@type": "rdf:Property", - "domainIncludes": "schema:ParcelDelivery", - "rangeIncludes": "schema:PostalAddress", - "rdfs:comment": "Shipper's address.", - "rdfs:label": "originAddress" - }, - { - "@id": "schema:partOfOrder", - "@type": "rdf:Property", - "domainIncludes": "schema:ParcelDelivery", - "rangeIncludes": "schema:Order", - "rdfs:comment": "The overall order the items in this delivery were included in.", - "rdfs:label": "partOfOrder" - }, - { - "@id": "schema:trackingNumber", - "@type": "rdf:Property", - "domainIncludes": "schema:ParcelDelivery", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Shipper tracking number.", - "rdfs:label": "trackingNumber" - }, - { - "@id": "schema:trackingUrl", - "@type": "rdf:Property", - "domainIncludes": "schema:ParcelDelivery", - "rangeIncludes": "schema:URL", - "rdfs:comment": "Tracking url for the parcel delivery.", - "rdfs:label": "trackingUrl" - } - ] - }, - { - "@id": "schema:Permit", - "@type": "rdfs:Class", - "rdfs:comment": "A permit issued by an organization, e.g. 
a parking pass.", - "rdfs:label": "Permit", - "rdfs:subClassOf": "schema:Intangible", - "children": [ - { - "@id": "schema:GovernmentPermit", - "@type": "rdfs:Class", - "rdfs:comment": "A permit issued by a government agency.", - "rdfs:label": "GovernmentPermit", - "rdfs:subClassOf": "schema:Permit" - } - ], - "properties": [ - { - "@id": "schema:issuedBy", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Ticket", - "schema:Permit" - ], - "rangeIncludes": "schema:Organization", - "rdfs:comment": "The organization issuing the ticket or permit.", - "rdfs:label": "issuedBy" - }, - { - "@id": "schema:issuedThrough", - "@type": "rdf:Property", - "domainIncludes": "schema:Permit", - "rangeIncludes": "schema:Service", - "rdfs:comment": "The service through with the permit was granted.", - "rdfs:label": "issuedThrough" - }, - { - "@id": "schema:permitAudience", - "@type": "rdf:Property", - "domainIncludes": "schema:Permit", - "rangeIncludes": "schema:Audience", - "rdfs:comment": "The target audience for this permit.", - "rdfs:label": "permitAudience" - }, - { - "@id": "schema:validFor", - "@type": "rdf:Property", - "domainIncludes": "schema:Permit", - "rangeIncludes": "schema:Duration", - "rdfs:comment": "The time validity of the permit.", - "rdfs:label": "validFor" - }, - { - "@id": "schema:validFrom", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:PriceSpecification", - "schema:Demand", - "schema:OpeningHoursSpecification", - "schema:Permit" - ], - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The date when the item becomes valid.", - "rdfs:label": "validFrom" - }, - { - "@id": "schema:validIn", - "@type": "rdf:Property", - "domainIncludes": "schema:Permit", - "rangeIncludes": "schema:AdministrativeArea", - "rdfs:comment": "The geographic area where the permit is valid.", - "rdfs:label": "validIn" - }, - { - "@id": "schema:validUntil", - "@type": "rdf:Property", - "domainIncludes": "schema:Permit", - "rangeIncludes": 
"schema:Date", - "rdfs:comment": "The date when the item is no longer valid.", - "rdfs:label": "validUntil" - } - ] - }, - { - "@id": "schema:ProgramMembership", - "@type": "rdfs:Class", - "rdfs:comment": "Used to describe membership in a loyalty programs (e.g. \"StarAliance\"), traveler clubs (e.g. \"AAA\"), purchase clubs (\"Safeway Club\"), etc.", - "rdfs:label": "Program Membership", - "rdfs:subClassOf": "schema:Intangible", - "properties": [ - { - "@id": "schema:hostingOrganization", - "@type": "rdf:Property", - "domainIncludes": "schema:ProgramMembership", - "rangeIncludes": "schema:Organization", - "rdfs:comment": "The organization (airline, travelers' club, etc.) the membership is made with.", - "rdfs:label": "hostingOrganization" - }, - { - "@id": "schema:member", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:ProgramMembership" - ], - "inverseOf": "schema:memberOf", - "rangeIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "A member of an Organization or a ProgramMembership. 
Organizations can be members of organizations; ProgramMembership is typically for individuals.", - "rdfs:label": "member" - }, - { - "@id": "schema:members", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:ProgramMembership" - ], - "rangeIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "A member of this organization (legacy spelling; see singular form, member).", - "rdfs:label": "members", - "supercededBy": "schema:member" - }, - { - "@id": "schema:membershipNumber", - "@type": "rdf:Property", - "domainIncludes": "schema:ProgramMembership", - "rangeIncludes": "schema:Text", - "rdfs:comment": "A unique identifier for the membership.", - "rdfs:label": "membershipNumber" - }, - { - "@id": "schema:programName", - "@type": "rdf:Property", - "domainIncludes": "schema:ProgramMembership", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The program providing the membership.", - "rdfs:label": "programName" - } - ] - }, - { - "@id": "schema:Property", - "@type": "rdfs:Class", - "rdfs:comment": "A property, used to indicate attributes and relationships of some Thing; equivalent to rdf:Property.", - "rdfs:label": "Property", - "rdfs:subClassOf": "schema:Intangible", - "properties": [ - { - "@id": "schema:domainIncludes", - "@type": "rdf:Property", - "domainIncludes": "schema:Property", - "rangeIncludes": "schema:Class", - "rdfs:comment": "Relates a property to a class that is (one of) the type(s) the property is expected to be used on.", - "rdfs:label": "domainIncludes" - }, - { - "@id": "schema:inverseOf", - "@type": "rdf:Property", - "domainIncludes": "schema:Property", - "rangeIncludes": "schema:Property", - "rdfs:comment": "Relates a property to a property that is its inverse. Inverse properties relate the same pairs of items to each other, but in reversed direction. For example, the 'alumni' and 'alumniOf' properties are inverseOf each other. 
Some properties don't have explicit inverses; in these situations RDFa and JSON-LD syntax for reverse properties can be used.", - "rdfs:label": "inverseOf" - }, - { - "@id": "schema:rangeIncludes", - "@type": "rdf:Property", - "domainIncludes": "schema:Property", - "rangeIncludes": "schema:Class", - "rdfs:comment": "Relates a property to a class that constitutes (one of) the expected type(s) for values of the property.", - "rdfs:label": "rangeIncludes" - }, - { - "@id": "schema:supercededBy", - "@type": "rdf:Property", - "domainIncludes": "schema:Property", - "rangeIncludes": "schema:Property", - "rdfs:comment": "Relates a property to one that supercedes it.", - "rdfs:label": "supercededBy" - } - ] - }, - { - "@id": "schema:PropertyValueSpecification", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_ActionCollabClass" - }, - "rdfs:comment": "A Property value specification.", - "rdfs:label": "PropertyValueSpecification", - "rdfs:subClassOf": "schema:Intangible", - "properties": [ - { - "@id": "schema:defaultValue", - "@type": "rdf:Property", - "domainIncludes": "schema:PropertyValueSpecification", - "rangeIncludes": [ - "schema:Thing", - "schema:Text" - ], - "rdfs:comment": "The default value of the input. 
For properties that expect a literal, the default is a literal value, for properties that expect an object, it's an ID reference to one of the current values.", - "rdfs:label": "defaultValue" - }, - { - "@id": "schema:maxValue", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:QuantitativeValue", - "schema:PropertyValueSpecification" - ], - "rangeIncludes": "schema:Number", - "rdfs:comment": "The upper value of some characteristic or property.", - "rdfs:label": "maxValue" - }, - { - "@id": "schema:minValue", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:QuantitativeValue", - "schema:PropertyValueSpecification" - ], - "rangeIncludes": "schema:Number", - "rdfs:comment": "The lower value of some characteristic or property.", - "rdfs:label": "minValue" - }, - { - "@id": "schema:multipleValues", - "@type": "rdf:Property", - "domainIncludes": "schema:PropertyValueSpecification", - "rangeIncludes": "schema:Boolean", - "rdfs:comment": "Whether multiple values are allowed for the property. Default is false.", - "rdfs:label": "multipleValues" - }, - { - "@id": "schema:readonlyValue", - "@type": "rdf:Property", - "domainIncludes": "schema:PropertyValueSpecification", - "rangeIncludes": "schema:Boolean", - "rdfs:comment": "Whether or not a property is mutable. Default is false. 
Specifying this for a property that also has a value makes it act similar to a \"hidden\" input in an HTML form.", - "rdfs:label": "readonlyValue" - }, - { - "@id": "schema:stepValue", - "@type": "rdf:Property", - "domainIncludes": "schema:PropertyValueSpecification", - "rangeIncludes": "schema:Number", - "rdfs:comment": "The stepValue attribute indicates the granularity that is expected (and required) of the value in a PropertyValueSpecification.", - "rdfs:label": "stepValue" - }, - { - "@id": "schema:valueMaxLength", - "@type": "rdf:Property", - "domainIncludes": "schema:PropertyValueSpecification", - "rangeIncludes": "schema:Number", - "rdfs:comment": "Specifies the allowed range for number of characters in a literal value.", - "rdfs:label": "valueMaxLength" - }, - { - "@id": "schema:valueMinLength", - "@type": "rdf:Property", - "domainIncludes": "schema:PropertyValueSpecification", - "rangeIncludes": "schema:Number", - "rdfs:comment": "Specifies the minimum allowed range for number of characters in a literal value.", - "rdfs:label": "valueMinLength" - }, - { - "@id": "schema:valueName", - "@type": "rdf:Property", - "domainIncludes": "schema:PropertyValueSpecification", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Indicates the name of the PropertyValueSpecification to be used in URL templates and form encoding in a manner analogous to HTML's input@name.", - "rdfs:label": "valueName" - }, - { - "@id": "schema:valuePattern", - "@type": "rdf:Property", - "domainIncludes": "schema:PropertyValueSpecification", - "rangeIncludes": "schema:Number", - "rdfs:comment": "Specifies a regular expression for testing literal values according to the HTML spec.", - "rdfs:label": "valuePattern" - }, - { - "@id": "schema:valueRequired", - "@type": "rdf:Property", - "domainIncludes": "schema:PropertyValueSpecification", - "rangeIncludes": "schema:Boolean", - "rdfs:comment": "Whether the property must be filled in to complete the action. 
Default is false.", - "rdfs:label": "valueRequired" - } - ] - }, - { - "@id": "schema:Quantity", - "@type": "rdfs:Class", - "rdfs:comment": "Quantities such as distance, time, mass, weight, etc. Particular instances of say Mass are entities like '3 Kg' or '4 milligrams'.", - "rdfs:label": "Quantity", - "rdfs:subClassOf": "schema:Intangible", - "children": [ - { - "@id": "schema:Distance", - "@type": "rdfs:Class", - "rdfs:comment": "Properties that take Distances as values are of the form '<Number> <Length unit of measure>'. E.g., '7 ft'", - "rdfs:label": "Distance", - "rdfs:subClassOf": "schema:Quantity" - }, - { - "@id": "schema:Duration", - "@type": "rdfs:Class", - "rdfs:comment": "Quantity: Duration (use ISO 8601 duration format).", - "rdfs:label": "Duration", - "rdfs:subClassOf": "schema:Quantity" - }, - { - "@id": "schema:Energy", - "@type": "rdfs:Class", - "rdfs:comment": "Properties that take Energy as values are of the form '<Number> <Energy unit of measure>'", - "rdfs:label": "Energy", - "rdfs:subClassOf": "schema:Quantity" - }, - { - "@id": "schema:Mass", - "@type": "rdfs:Class", - "rdfs:comment": "Properties that take Mass as values are of the form '<Number> <Mass unit of measure>'. 
E.g., '7 kg'", - "rdfs:label": "Mass", - "rdfs:subClassOf": "schema:Quantity" - } - ] - }, - { - "@id": "schema:Rating", - "@type": "rdfs:Class", - "rdfs:comment": "The rating of the video.", - "rdfs:label": "Rating", - "rdfs:subClassOf": "schema:Intangible", - "children": [ - { - "@id": "schema:AggregateRating", - "@type": "rdfs:Class", - "rdfs:comment": "The average rating based on multiple ratings or reviews.", - "rdfs:label": "AggregateRating", - "rdfs:subClassOf": "schema:Rating", - "properties": [ - { - "@id": "schema:itemReviewed", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:AggregateRating", - "schema:Review" - ], - "rangeIncludes": "schema:Thing", - "rdfs:comment": "The item that is being reviewed/rated.", - "rdfs:label": "itemReviewed" - }, - { - "@id": "schema:ratingCount", - "@type": "rdf:Property", - "domainIncludes": "schema:AggregateRating", - "rangeIncludes": "schema:Number", - "rdfs:comment": "The count of total number of ratings.", - "rdfs:label": "ratingCount" - }, - { - "@id": "schema:reviewCount", - "@type": "rdf:Property", - "domainIncludes": "schema:AggregateRating", - "rangeIncludes": "schema:Number", - "rdfs:comment": "The count of total number of reviews.", - "rdfs:label": "reviewCount" - } - ] - } - ], - "properties": [ - { - "@id": "schema:bestRating", - "@type": "rdf:Property", - "domainIncludes": "schema:Rating", - "rangeIncludes": [ - "schema:Number", - "schema:Text" - ], - "rdfs:comment": "The highest value allowed in this rating system. 
If bestRating is omitted, 5 is assumed.", - "rdfs:label": "bestRating" - }, - { - "@id": "schema:ratingValue", - "@type": "rdf:Property", - "domainIncludes": "schema:Rating", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The rating for the content.", - "rdfs:label": "ratingValue" - }, - { - "@id": "schema:worstRating", - "@type": "rdf:Property", - "domainIncludes": "schema:Rating", - "rangeIncludes": [ - "schema:Number", - "schema:Text" - ], - "rdfs:comment": "The lowest value allowed in this rating system. If worstRating is omitted, 1 is assumed.", - "rdfs:label": "worstRating" - } - ] - }, - { - "@id": "schema:Reservation", - "@type": "rdfs:Class", - "rdfs:comment": "Describes a reservation for travel, dining or an event. Some reservations require tickets.Note: This type is for information about actual reservations, e.g. in confirmation emails or HTML pages with individual confirmations of reservations. For offers of tickets, restaurant reservations, flights, or rental cars, use http://schema.org/Offer.", - "rdfs:label": "Reservation", - "rdfs:subClassOf": "schema:Intangible", - "children": [ - { - "@id": "schema:BusReservation", - "@type": "rdfs:Class", - "rdfs:comment": "A reservation for bus travel.Note: This type is for information about actual reservations, e.g. in confirmation emails or HTML pages with individual confirmations of reservations. For offers of tickets, use http://schema.org/Offer.", - "rdfs:label": "BusReservation", - "rdfs:subClassOf": "schema:Reservation" - }, - { - "@id": "schema:EventReservation", - "@type": "rdfs:Class", - "rdfs:comment": "A reservation for an event like a concert, sporting event, or lecture.Note: This type is for information about actual reservations, e.g. in confirmation emails or HTML pages with individual confirmations of reservations. 
For offers of tickets, use http://schema.org/Offer.", - "rdfs:label": "EventReservation", - "rdfs:subClassOf": "schema:Reservation" - }, - { - "@id": "schema:FlightReservation", - "@type": "rdfs:Class", - "rdfs:comment": "A reservation for air travel.Note: This type is for information about actual reservations, e.g. in confirmation emails or HTML pages with individual confirmations of reservations. For offers of tickets, use http://schema.org/Offer.", - "rdfs:label": "FlightReservation", - "rdfs:subClassOf": "schema:Reservation", - "properties": [ - { - "@id": "schema:boardingGroup", - "@type": "rdf:Property", - "domainIncludes": "schema:FlightReservation", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The airline-specific indicator of boarding order / preference.", - "rdfs:label": "boardingGroup" - } - ] - }, - { - "@id": "schema:FoodEstablishmentReservation", - "@type": "rdfs:Class", - "rdfs:comment": "A reservation to dine at a food-related business.Note: This type is for information about actual reservations, e.g. in confirmation emails or HTML pages with individual confirmations of reservations.", - "rdfs:label": "FoodEstablishmentReservation", - "rdfs:subClassOf": "schema:Reservation", - "properties": [ - { - "@id": "schema:endTime", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Action", - "schema:FoodEstablishmentReservation" - ], - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The endTime of something. For a reserved event or service (e.g. FoodEstablishmentReservation), the time that it is expected to end. For actions that span a period of time, when the action was performed. e.g. John wrote a book from January to *December*.\n\nNote that Event uses startDate/endDate instead of startTime/endTime, even when describing dates with times. 
This situation may be clarified in future revisions.\n", - "rdfs:label": "endTime" - }, - { - "@id": "schema:partySize", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:FoodEstablishmentReservation", - "schema:TaxiReservation" - ], - "rangeIncludes": [ - "schema:Number", - "schema:QuantitativeValue" - ], - "rdfs:comment": "Number of people the reservation should accommodate.", - "rdfs:label": "partySize" - }, - { - "@id": "schema:startTime", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Action", - "schema:FoodEstablishmentReservation" - ], - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The startTime of something. For a reserved event or service (e.g. FoodEstablishmentReservation), the time that it is expected to start. For actions that span a period of time, when the action was performed. e.g. John wrote a book from *January* to December.\n\nNote that Event uses startDate/endDate instead of startTime/endTime, even when describing dates with times. This situation may be clarified in future revisions.\n", - "rdfs:label": "startTime" - } - ] - }, - { - "@id": "schema:LodgingReservation", - "@type": "rdfs:Class", - "rdfs:comment": "A reservation for lodging at a hotel, motel, inn, etc.Note: This type is for information about actual reservations, e.g. 
in confirmation emails or HTML pages with individual confirmations of reservations.", - "rdfs:label": "LodgingReservation", - "rdfs:subClassOf": "schema:Reservation", - "properties": [ - { - "@id": "schema:checkinTime", - "@type": "rdf:Property", - "domainIncludes": "schema:LodgingReservation", - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The earliest someone may check into a lodging establishment.", - "rdfs:label": "checkinTime" - }, - { - "@id": "schema:checkoutTime", - "@type": "rdf:Property", - "domainIncludes": "schema:LodgingReservation", - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The latest someone may check out of a lodging establishment.", - "rdfs:label": "checkoutTime" - }, - { - "@id": "schema:lodgingUnitDescription", - "@type": "rdf:Property", - "domainIncludes": "schema:LodgingReservation", - "rangeIncludes": "schema:Text", - "rdfs:comment": "A full description of the lodging unit.", - "rdfs:label": "lodgingUnitDescription" - }, - { - "@id": "schema:lodgingUnitType", - "@type": "rdf:Property", - "domainIncludes": "schema:LodgingReservation", - "rangeIncludes": [ - "schema:Text", - "schema:QualitativeValue" - ], - "rdfs:comment": "Textual description of the unit type (including suite vs. 
room, size of bed, etc.).", - "rdfs:label": "lodgingUnitType" - }, - { - "@id": "schema:numAdults", - "@type": "rdf:Property", - "domainIncludes": "schema:LodgingReservation", - "rangeIncludes": [ - "schema:Number", - "schema:QuantitativeValue" - ], - "rdfs:comment": "The number of adults staying in the unit.", - "rdfs:label": "numAdults" - }, - { - "@id": "schema:numChildren", - "@type": "rdf:Property", - "domainIncludes": "schema:LodgingReservation", - "rangeIncludes": [ - "schema:Number", - "schema:QuantitativeValue" - ], - "rdfs:comment": "The number of children staying in the unit.", - "rdfs:label": "numChildren" - } - ] - }, - { - "@id": "schema:RentalCarReservation", - "@type": "rdfs:Class", - "rdfs:comment": "A reservation for a rental car.Note: This type is for information about actual reservations, e.g. in confirmation emails or HTML pages with individual confirmations of reservations.", - "rdfs:label": "RentalCarReservation", - "rdfs:subClassOf": "schema:Reservation", - "properties": [ - { - "@id": "schema:dropoffLocation", - "@type": "rdf:Property", - "domainIncludes": "schema:RentalCarReservation", - "rangeIncludes": "schema:Place", - "rdfs:comment": "Where a rental car can be dropped off.", - "rdfs:label": "dropoffLocation" - }, - { - "@id": "schema:dropoffTime", - "@type": "rdf:Property", - "domainIncludes": "schema:RentalCarReservation", - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "When a rental car can be dropped off.", - "rdfs:label": "dropoffTime" - }, - { - "@id": "schema:pickupLocation", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:RentalCarReservation", - "schema:TaxiReservation" - ], - "rangeIncludes": "schema:Place", - "rdfs:comment": "Where a taxi will pick up a passenger or a rental car can be picked up.", - "rdfs:label": "pickupLocation" - }, - { - "@id": "schema:pickupTime", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:RentalCarReservation", - "schema:TaxiReservation" - ], - "rangeIncludes": 
"schema:DateTime", - "rdfs:comment": "When a taxi will pickup a passenger or a rental car can be picked up.", - "rdfs:label": "pickupTime" - } - ] - }, - { - "@id": "schema:ReservationPackage", - "@type": "rdfs:Class", - "rdfs:comment": "A group of multiple reservations with common values for all sub-reservations.", - "rdfs:label": "ReservationPackage", - "rdfs:subClassOf": "schema:Reservation", - "properties": [ - { - "@id": "schema:subReservation", - "@type": "rdf:Property", - "domainIncludes": "schema:ReservationPackage", - "rangeIncludes": "schema:Reservation", - "rdfs:comment": "The individual reservations included in the package. Typically a repeated property.", - "rdfs:label": "subReservation" - } - ] - }, - { - "@id": "schema:TaxiReservation", - "@type": "rdfs:Class", - "rdfs:comment": "A reservation for a taxi.Note: This type is for information about actual reservations, e.g. in confirmation emails or HTML pages with individual confirmations of reservations. For offers of tickets, use http://schema.org/Offer.", - "rdfs:label": " TaxiReservation", - "rdfs:subClassOf": "schema:Reservation", - "properties": [ - { - "@id": "schema:partySize", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:FoodEstablishmentReservation", - "schema:TaxiReservation" - ], - "rangeIncludes": [ - "schema:Number", - "schema:QuantitativeValue" - ], - "rdfs:comment": "Number of people the reservation should accommodate.", - "rdfs:label": "partySize" - }, - { - "@id": "schema:pickupLocation", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:RentalCarReservation", - "schema:TaxiReservation" - ], - "rangeIncludes": "schema:Place", - "rdfs:comment": "Where a taxi will pick up a passenger or a rental car can be picked up.", - "rdfs:label": "pickupLocation" - }, - { - "@id": "schema:pickupTime", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:RentalCarReservation", - "schema:TaxiReservation" - ], - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "When a 
taxi will pickup a passenger or a rental car can be picked up.", - "rdfs:label": "pickupTime" - } - ] - }, - { - "@id": "schema:TrainReservation", - "@type": "rdfs:Class", - "rdfs:comment": "A reservation for train travel.Note: This type is for information about actual reservations, e.g. in confirmation emails or HTML pages with individual confirmations of reservations. For offers of tickets, use http://schema.org/Offer.", - "rdfs:label": " TrainReservation", - "rdfs:subClassOf": "schema:Reservation" - } - ], - "properties": [ - { - "@id": "schema:bookingAgent", - "@type": "rdf:Property", - "domainIncludes": "schema:Reservation", - "rangeIncludes": [ - "schema:Person", - "schema:Organization" - ], - "rdfs:comment": "If the reservation was not booked directly through the provider, the third-party booking agent can be recorded through this property.", - "rdfs:label": "bookingAgent" - }, - { - "@id": "schema:bookingTime", - "@type": "rdf:Property", - "domainIncludes": "schema:Reservation", - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The date and time the reservation was booked.", - "rdfs:label": "bookingTime" - }, - { - "@id": "schema:modifiedTime", - "@type": "rdf:Property", - "domainIncludes": "schema:Reservation", - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The date and time the reservation was modified.", - "rdfs:label": "modifiedTime" - }, - { - "@id": "schema:priceCurrency", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Reservation", - "schema:Ticket", - "schema:Offer", - "schema:PriceSpecification" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The currency (in 3-letter ISO 4217 format) of the price or a price component, when attached to PriceSpecification and its subtypes.", - "rdfs:label": "priceCurrency" - }, - { - "@id": "schema:programMembershipUsed", - "@type": "rdf:Property", - "domainIncludes": "schema:Reservation", - "rangeIncludes": "schema:ProgramMembership", - "rdfs:comment": "Any membership in a 
frequent flyer, hotel loyalty program, etc. being applied to the reservation.", - "rdfs:label": "programMembershipUsed" - }, - { - "@id": "schema:provider", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Service", - "schema:Reservation", - "schema:Flight", - "schema:TrainTrip", - "schema:BusTrip" - ], - "rangeIncludes": [ - "schema:Person", - "schema:Organization" - ], - "rdfs:comment": "The person or organization providing the service, reservation, or creative work. The provider may subcontract out the service.", - "rdfs:label": "provider" - }, - { - "@id": "schema:reservationFor", - "@type": "rdf:Property", - "domainIncludes": "schema:Reservation", - "rangeIncludes": "schema:Thing", - "rdfs:comment": "The thing -- flight, event, restaurant,etc. being reserved.", - "rdfs:label": "reservationFor" - }, - { - "@id": "schema:reservationId", - "@type": "rdf:Property", - "domainIncludes": "schema:Reservation", - "rangeIncludes": "schema:Text", - "rdfs:comment": "A unique identifier for the reservation.", - "rdfs:label": "reservationId" - }, - { - "@id": "schema:reservationStatus", - "@type": "rdf:Property", - "domainIncludes": "schema:Reservation", - "rangeIncludes": "schema:ReservationStatusType", - "rdfs:comment": "The current status of the reservation.", - "rdfs:label": "reservationStatus" - }, - { - "@id": "schema:reservedTicket", - "@type": "rdf:Property", - "domainIncludes": "schema:Reservation", - "rangeIncludes": "schema:Ticket", - "rdfs:comment": "A ticket associated with the reservation.", - "rdfs:label": "reservedTicket" - }, - { - "@id": "schema:totalPrice", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Reservation", - "schema:Ticket" - ], - "rangeIncludes": [ - "schema:Number", - "schema:Text", - "schema:PriceSpecification" - ], - "rdfs:comment": "The total price for the reservation or ticket, including applicable taxes, shipping, etc.", - "rdfs:label": "totalPrice" - }, - { - "@id": "schema:underName", - 
"@type": "rdf:Property", - "domainIncludes": [ - "schema:Reservation", - "schema:Ticket" - ], - "rangeIncludes": [ - "schema:Person", - "schema:Organization" - ], - "rdfs:comment": "The person or organization the reservation or ticket is for.", - "rdfs:label": "underName" - } - ] - }, - { - "@id": "schema:Role", - "@type": "rdfs:Class", - "rdfs:comment": "Represents additional information about a relationship or property. For example a Role can be used to say that a 'member' role linking some SportsTeam to a player occurred during a particular time period. Or that a Person's 'actor' role in a Movie was for some particular characterName. Such properties can be attached to a Role entity, which is then associated with the main entities using ordinary properties like 'member' or 'actor'.\n ", - "rdfs:label": "Role", - "rdfs:subClassOf": "schema:Intangible", - "children": [ - { - "@id": "schema:OrganizationRole", - "@type": "rdfs:Class", - "rdfs:comment": "A subclass of Role used to describe roles within organizations.", - "rdfs:label": "OrganizationRole", - "rdfs:subClassOf": "schema:Role", - "properties": [ - { - "@id": "schema:namedPosition", - "@type": "rdf:Property", - "domainIncludes": "schema:OrganizationRole", - "rangeIncludes": [ - "schema:Text", - "schema:URL" - ], - "rdfs:comment": "A position played, performed or filled by a person or organization, as part of an organization. For example, an athlete in a SportsTeam might play in the position named 'Quarterback'.", - "rdfs:label": "namedPosition" - } - ] - }, - { - "@id": "schema:PerformanceRole", - "@type": "rdfs:Class", - "rdfs:comment": "A PerformanceRole is a Role that some entity places with regard to a theatrical performance, e.g. 
in a Movie, TVSeries etc.", - "rdfs:label": "PerformanceRole", - "rdfs:subClassOf": "schema:Role", - "properties": [ - { - "@id": "schema:characterName", - "@type": "rdf:Property", - "domainIncludes": "schema:PerformanceRole", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The name of a character played in some acting or performing role, i.e. in a PerformanceRole.", - "rdfs:label": "characterName" - } - ] - } - ], - "properties": [ - { - "@id": "schema:endDate", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Role", - "schema:Event", - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries" - ], - "rangeIncludes": "schema:Date", - "rdfs:comment": "The end date and time of the role, event or item (in ISO 8601 date format).", - "rdfs:label": "endDate" - }, - { - "@id": "schema:startDate", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Role", - "schema:Event", - "schema:Season", - "schema:TVSeason", - "schema:Series", - "schema:TVSeries" - ], - "rangeIncludes": "schema:Date", - "rdfs:comment": "The start date and time of the event, role or item (in ISO 8601 date format).", - "rdfs:label": "startDate" - } - ] - }, - { - "@id": "schema:Seat", - "@type": "rdfs:Class", - "rdfs:comment": "Used to describe a seat, such as a reserved seat in an event reservation.", - "rdfs:label": "Seat", - "rdfs:subClassOf": "schema:Intangible", - "properties": [ - { - "@id": "schema:seatNumber", - "@type": "rdf:Property", - "domainIncludes": "schema:Seat", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The location of the reserved seat (e.g., 27).", - "rdfs:label": "seatNumber" - }, - { - "@id": "schema:seatRow", - "@type": "rdf:Property", - "domainIncludes": "schema:Seat", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The row location of the reserved seat (e.g., B).", - "rdfs:label": "seatRow" - }, - { - "@id": "schema:seatSection", - "@type": "rdf:Property", - "domainIncludes": "schema:Seat", - "rangeIncludes": "schema:Text", 
- "rdfs:comment": "The section location of the reserved seat (e.g. Orchestra).", - "rdfs:label": "seatSection" - }, - { - "@id": "schema:seatingType", - "@type": "rdf:Property", - "domainIncludes": "schema:Seat", - "rangeIncludes": [ - "schema:Text", - "schema:QualitativeValue" - ], - "rdfs:comment": "The type/class of the seat.", - "rdfs:label": "seatingType" - } - ] - }, - { - "@id": "schema:Service", - "@type": "rdfs:Class", - "rdfs:comment": "A service provided by an organization, e.g. delivery service, print services, etc.", - "rdfs:label": "Service", - "rdfs:subClassOf": "schema:Intangible", - "children": [ - { - "@id": "schema:GovernmentService", - "@type": "rdfs:Class", - "rdfs:comment": "A service provided by a government organization, e.g. food stamps, veterans benefits, etc.", - "rdfs:label": "GovernmentService", - "rdfs:subClassOf": "schema:Service", - "properties": [ - { - "@id": "schema:serviceOperator", - "@type": "rdf:Property", - "domainIncludes": "schema:GovernmentService", - "rangeIncludes": "schema:Organization", - "rdfs:comment": "The operating organization, if different from the provider. This enables the representation of services that are provided by an organization, but operated by another organization like a subcontractor.", - "rdfs:label": "serviceOperator" - } - ] - }, - { - "@id": "schema:Taxi", - "@type": "rdfs:Class", - "rdfs:comment": "A taxi.", - "rdfs:label": "Taxi", - "rdfs:subClassOf": "schema:Service" - } - ], - "properties": [ - { - "@id": "schema:availableChannel", - "@type": "rdf:Property", - "domainIncludes": "schema:Service", - "rangeIncludes": "schema:ServiceChannel", - "rdfs:comment": "A means of accessing the service (e.g. a phone bank, a web site, a location, etc.)", - "rdfs:label": "availableChannel" - }, - { - "@id": "schema:produces", - "@type": "rdf:Property", - "domainIncludes": "schema:Service", - "rangeIncludes": "schema:Thing", - "rdfs:comment": "The tangible thing generated by the service, e.g. 
a passport, permit, etc.", - "rdfs:label": "produces" - }, - { - "@id": "schema:provider", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Service", - "schema:Reservation", - "schema:Flight", - "schema:TrainTrip", - "schema:BusTrip" - ], - "rangeIncludes": [ - "schema:Person", - "schema:Organization" - ], - "rdfs:comment": "The person or organization providing the service, reservation, or creative work. The provider may subcontract out the service.", - "rdfs:label": "provider" - }, - { - "@id": "schema:serviceArea", - "@type": "rdf:Property", - "domainIncludes": "schema:Service", - "rangeIncludes": "schema:AdministrativeArea", - "rdfs:comment": "The geographic area where the service is provided.", - "rdfs:label": "serviceArea" - }, - { - "@id": "schema:serviceAudience", - "@type": "rdf:Property", - "domainIncludes": "schema:Service", - "rangeIncludes": "schema:Audience", - "rdfs:comment": "The audience eligible for this service.", - "rdfs:label": "serviceAudience" - }, - { - "@id": "schema:serviceType", - "@type": "rdf:Property", - "domainIncludes": "schema:Service", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The type of service being offered, e.g. veterans' benefits, emergency relief, etc.", - "rdfs:label": "serviceType" - } - ] - }, - { - "@id": "schema:ServiceChannel", - "@type": "rdfs:Class", - "rdfs:comment": "A means for accessing a service, e.g. 
a government office location, web site, or phone number.", - "rdfs:label": "ServiceChannel", - "rdfs:subClassOf": "schema:Intangible", - "properties": [ - { - "@id": "schema:availableLanguage", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:ContactPoint", - "schema:ServiceChannel" - ], - "rangeIncludes": "schema:Language", - "rdfs:comment": "A language someone may use with the item.", - "rdfs:label": "availableLanguage" - }, - { - "@id": "schema:processingTime", - "@type": "rdf:Property", - "domainIncludes": "schema:ServiceChannel", - "rangeIncludes": "schema:Duration", - "rdfs:comment": "Estimated processing time for the service using this channel.", - "rdfs:label": "processingTime" - }, - { - "@id": "schema:providesService", - "@type": "rdf:Property", - "domainIncludes": "schema:ServiceChannel", - "rangeIncludes": "schema:Service", - "rdfs:comment": "The service provided by this channel.", - "rdfs:label": "providesService" - }, - { - "@id": "schema:serviceLocation", - "@type": "rdf:Property", - "domainIncludes": "schema:ServiceChannel", - "rangeIncludes": "schema:Place", - "rdfs:comment": "The location (e.g. civic structure, local business, etc.) 
where a person can go to access the service.", - "rdfs:label": "serviceLocation" - }, - { - "@id": "schema:servicePhone", - "@type": "rdf:Property", - "domainIncludes": "schema:ServiceChannel", - "rangeIncludes": "schema:ContactPoint", - "rdfs:comment": "The phone number to use to access the service.", - "rdfs:label": "servicePhone" - }, - { - "@id": "schema:servicePostalAddress", - "@type": "rdf:Property", - "domainIncludes": "schema:ServiceChannel", - "rangeIncludes": "schema:PostalAddress", - "rdfs:comment": "The address for accessing the service by mail.", - "rdfs:label": "servicePostalAddress" - }, - { - "@id": "schema:serviceSmsNumber", - "@type": "rdf:Property", - "domainIncludes": "schema:ServiceChannel", - "rangeIncludes": "schema:ContactPoint", - "rdfs:comment": "The number to access the service by text message.", - "rdfs:label": "serviceSmsNumber" - }, - { - "@id": "schema:serviceUrl", - "@type": "rdf:Property", - "domainIncludes": "schema:ServiceChannel", - "rangeIncludes": "schema:URL", - "rdfs:comment": "The website to access the service.", - "rdfs:label": "serviceUrl" - } - ] - }, - { - "@id": "schema:StructuredValue", - "@type": "rdfs:Class", - "rdfs:comment": "Structured values are strings—for example, addresses—that have certain constraints on their structure.", - "rdfs:label": "StructuredValue", - "rdfs:subClassOf": "schema:Intangible", - "children": [ - { - "@id": "schema:ContactPoint", - "@type": "rdfs:Class", - "rdfs:comment": "A contact point—for example, a Customer Complaints department.", - "rdfs:label": "ContactPoint", - "rdfs:subClassOf": "schema:StructuredValue", - "children": [ - { - "@id": "schema:PostalAddress", - "@type": "rdfs:Class", - "rdfs:comment": "The mailing address.", - "rdfs:label": "PostalAddress", - "rdfs:subClassOf": "schema:ContactPoint", - "properties": [ - { - "@id": "schema:addressCountry", - "@type": "rdf:Property", - "domainIncludes": "schema:PostalAddress", - "rangeIncludes": "schema:Country", - "rdfs:comment": 
"The country. For example, USA. You can also provide the two-letter ISO 3166-1 alpha-2 country code.", - "rdfs:label": "addressCountry" - }, - { - "@id": "schema:addressLocality", - "@type": "rdf:Property", - "domainIncludes": "schema:PostalAddress", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The locality. For example, Mountain View.", - "rdfs:label": "addressLocality" - }, - { - "@id": "schema:addressRegion", - "@type": "rdf:Property", - "domainIncludes": "schema:PostalAddress", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The region. For example, CA.", - "rdfs:label": "addressRegion" - }, - { - "@id": "schema:postOfficeBoxNumber", - "@type": "rdf:Property", - "domainIncludes": "schema:PostalAddress", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The post office box number for PO box addresses.", - "rdfs:label": "postOfficeBoxNumber" - }, - { - "@id": "schema:postalCode", - "@type": "rdf:Property", - "domainIncludes": "schema:PostalAddress", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The postal code. For example, 94043.", - "rdfs:label": "postalCode" - }, - { - "@id": "schema:streetAddress", - "@type": "rdf:Property", - "domainIncludes": "schema:PostalAddress", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The street address. For example, 1600 Amphitheatre Pkwy.", - "rdfs:label": "streetAddress" - } - ] - } - ], - "properties": [ - { - "@id": "schema:areaServed", - "@type": "rdf:Property", - "domainIncludes": "schema:ContactPoint", - "rangeIncludes": "schema:AdministrativeArea", - "rdfs:comment": "The location served by this contact point (e.g., a phone number intended for Europeans vs. 
North Americans or only within the United States.)", - "rdfs:label": "areaServed" - }, - { - "@id": "schema:availableLanguage", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:ContactPoint", - "schema:ServiceChannel" - ], - "rangeIncludes": "schema:Language", - "rdfs:comment": "A language someone may use with the item.", - "rdfs:label": "availableLanguage" - }, - { - "@id": "schema:contactOption", - "@type": "rdf:Property", - "domainIncludes": "schema:ContactPoint", - "rangeIncludes": "schema:ContactPointOption", - "rdfs:comment": "An option available on this contact point (e.g. a toll-free number or support for hearing-impaired callers.)", - "rdfs:label": "contactOption" - }, - { - "@id": "schema:contactType", - "@type": "rdf:Property", - "domainIncludes": "schema:ContactPoint", - "rangeIncludes": "schema:Text", - "rdfs:comment": "A person or organization can have different contact points, for different purposes. For example, a sales contact point, a PR contact point and so on. 
This property is used to specify the kind of contact point.", - "rdfs:label": "contactType" - }, - { - "@id": "schema:email", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:ContactPoint", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "Email address.", - "rdfs:label": "email" - }, - { - "@id": "schema:faxNumber", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place", - "schema:ContactPoint", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The fax number.", - "rdfs:label": "faxNumber" - }, - { - "@id": "schema:hoursAvailable", - "@type": "rdf:Property", - "domainIncludes": "schema:ContactPoint", - "rangeIncludes": "schema:OpeningHoursSpecification", - "rdfs:comment": "The hours during which this contact point is available.", - "rdfs:label": "hoursAvailable" - }, - { - "@id": "schema:productSupported", - "@type": "rdf:Property", - "domainIncludes": "schema:ContactPoint", - "rangeIncludes": [ - "schema:Product", - "schema:Text" - ], - "rdfs:comment": "The product or service this support contact point is related to (such as product support for a particular product line). This can be a specific product or product line (e.g. \"iPhone\") or a general category of products or services (e.g. 
\"smartphones\").", - "rdfs:label": "productSupported" - }, - { - "@id": "schema:telephone", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place", - "schema:ContactPoint", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The telephone number.", - "rdfs:label": "telephone" - } - ] - }, - { - "@id": "schema:GeoCoordinates", - "@type": "rdfs:Class", - "rdfs:comment": "The geographic coordinates of a place or event.", - "rdfs:label": "GeoCoordinates", - "rdfs:subClassOf": "schema:StructuredValue", - "properties": [ - { - "@id": "schema:elevation", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:GeoCoordinates", - "schema:GeoShape" - ], - "rangeIncludes": [ - "schema:Number", - "schema:Text" - ], - "rdfs:comment": "The elevation of a location.", - "rdfs:label": "elevation" - }, - { - "@id": "schema:latitude", - "@type": "rdf:Property", - "domainIncludes": "schema:GeoCoordinates", - "rangeIncludes": [ - "schema:Number", - "schema:Text" - ], - "rdfs:comment": "The latitude of a location. For example 37.42242.", - "rdfs:label": "latitude" - }, - { - "@id": "schema:longitude", - "@type": "rdf:Property", - "domainIncludes": "schema:GeoCoordinates", - "rangeIncludes": [ - "schema:Number", - "schema:Text" - ], - "rdfs:comment": "The longitude of a location. For example -122.08585.", - "rdfs:label": "longitude" - } - ] - }, - { - "@id": "schema:GeoShape", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_rNews" - }, - "rdfs:comment": "The geographic shape of a place.", - "rdfs:label": "GeoShape", - "rdfs:subClassOf": "schema:StructuredValue", - "properties": [ - { - "@id": "schema:box", - "@type": "rdf:Property", - "domainIncludes": "schema:GeoShape", - "rangeIncludes": "schema:Text", - "rdfs:comment": "A polygon is the area enclosed by a point-to-point path for which the starting and ending points are the same. 
A polygon is expressed as a series of four or more space delimited points where the first and final points are identical.", - "rdfs:label": "box" - }, - { - "@id": "schema:circle", - "@type": "rdf:Property", - "domainIncludes": "schema:GeoShape", - "rangeIncludes": "schema:Text", - "rdfs:comment": "A circle is the circular region of a specified radius centered at a specified latitude and longitude. A circle is expressed as a pair followed by a radius in meters.", - "rdfs:label": "circle" - }, - { - "@id": "schema:elevation", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:GeoCoordinates", - "schema:GeoShape" - ], - "rangeIncludes": [ - "schema:Number", - "schema:Text" - ], - "rdfs:comment": "The elevation of a location.", - "rdfs:label": "elevation" - }, - { - "@id": "schema:line", - "@type": "rdf:Property", - "domainIncludes": "schema:GeoShape", - "rangeIncludes": "schema:Text", - "rdfs:comment": "A line is a point-to-point path consisting of two or more points. A line is expressed as a series of two or more point objects separated by space.", - "rdfs:label": "line" - }, - { - "@id": "schema:polygon", - "@type": "rdf:Property", - "domainIncludes": "schema:GeoShape", - "rangeIncludes": "schema:Text", - "rdfs:comment": "A polygon is the area enclosed by a point-to-point path for which the starting and ending points are the same. 
A polygon is expressed as a series of four or more space delimited points where the first and final points are identical.", - "rdfs:label": "polygon" - } - ] - }, - { - "@id": "schema:NutritionInformation", - "@type": "rdfs:Class", - "rdfs:comment": "Nutritional information about the recipe.", - "rdfs:label": "NutritionInformation", - "rdfs:subClassOf": "schema:StructuredValue", - "properties": [ - { - "@id": "schema:calories", - "@type": "rdf:Property", - "domainIncludes": "schema:NutritionInformation", - "rangeIncludes": "schema:Energy", - "rdfs:comment": "The number of calories", - "rdfs:label": "calories" - }, - { - "@id": "schema:carbohydrateContent", - "@type": "rdf:Property", - "domainIncludes": "schema:NutritionInformation", - "rangeIncludes": "schema:Mass", - "rdfs:comment": "The number of grams of carbohydrates.", - "rdfs:label": "carbohydrateContent" - }, - { - "@id": "schema:cholesterolContent", - "@type": "rdf:Property", - "domainIncludes": "schema:NutritionInformation", - "rangeIncludes": "schema:Mass", - "rdfs:comment": "The number of milligrams of cholesterol.", - "rdfs:label": "cholesterolContent" - }, - { - "@id": "schema:fatContent", - "@type": "rdf:Property", - "domainIncludes": "schema:NutritionInformation", - "rangeIncludes": "schema:Mass", - "rdfs:comment": "The number of grams of fat.", - "rdfs:label": "fatContent" - }, - { - "@id": "schema:fiberContent", - "@type": "rdf:Property", - "domainIncludes": "schema:NutritionInformation", - "rangeIncludes": "schema:Mass", - "rdfs:comment": "The number of grams of fiber.", - "rdfs:label": "fiberContent" - }, - { - "@id": "schema:proteinContent", - "@type": "rdf:Property", - "domainIncludes": "schema:NutritionInformation", - "rangeIncludes": "schema:Mass", - "rdfs:comment": "The number of grams of protein.", - "rdfs:label": "proteinContent" - }, - { - "@id": "schema:saturatedFatContent", - "@type": "rdf:Property", - "domainIncludes": "schema:NutritionInformation", - "rangeIncludes": "schema:Mass", - 
"rdfs:comment": "The number of grams of saturated fat.", - "rdfs:label": "saturatedFatContent" - }, - { - "@id": "schema:servingSize", - "@type": "rdf:Property", - "domainIncludes": "schema:NutritionInformation", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The serving size, in terms of the number of volume or mass.", - "rdfs:label": "servingSize" - }, - { - "@id": "schema:sodiumContent", - "@type": "rdf:Property", - "domainIncludes": "schema:NutritionInformation", - "rangeIncludes": "schema:Mass", - "rdfs:comment": "The number of milligrams of sodium.", - "rdfs:label": "sodiumContent" - }, - { - "@id": "schema:sugarContent", - "@type": "rdf:Property", - "domainIncludes": "schema:NutritionInformation", - "rangeIncludes": "schema:Mass", - "rdfs:comment": "The number of grams of sugar.", - "rdfs:label": "sugarContent" - }, - { - "@id": "schema:transFatContent", - "@type": "rdf:Property", - "domainIncludes": "schema:NutritionInformation", - "rangeIncludes": "schema:Mass", - "rdfs:comment": "The number of grams of trans fat.", - "rdfs:label": "transFatContent" - }, - { - "@id": "schema:unsaturatedFatContent", - "@type": "rdf:Property", - "domainIncludes": "schema:NutritionInformation", - "rangeIncludes": "schema:Mass", - "rdfs:comment": "The number of grams of unsaturated fat.", - "rdfs:label": "unsaturatedFatContent" - } - ] - }, - { - "@id": "schema:OpeningHoursSpecification", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_GoodRelationsClass" - }, - "rdfs:comment": "A structured value providing information about the opening hours of a place or a certain service inside a place.", - "rdfs:label": "OpeningHoursSpecification", - "rdfs:subClassOf": "schema:StructuredValue", - "properties": [ - { - "@id": "schema:closes", - "@type": "rdf:Property", - "domainIncludes": "schema:OpeningHoursSpecification", - "rangeIncludes": "schema:Time", - "rdfs:comment": "The closing hour of the 
place or service on the given day(s) of the week.", - "rdfs:label": "closes" - }, - { - "@id": "schema:dayOfWeek", - "@type": "rdf:Property", - "domainIncludes": "schema:OpeningHoursSpecification", - "rangeIncludes": "schema:DayOfWeek", - "rdfs:comment": "The day of the week for which these opening hours are valid.", - "rdfs:label": "dayOfWeek" - }, - { - "@id": "schema:opens", - "@type": "rdf:Property", - "domainIncludes": "schema:OpeningHoursSpecification", - "rangeIncludes": "schema:Time", - "rdfs:comment": "The opening hour of the place or service on the given day(s) of the week.", - "rdfs:label": "opens" - }, - { - "@id": "schema:validFrom", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:PriceSpecification", - "schema:Demand", - "schema:OpeningHoursSpecification", - "schema:Permit" - ], - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The date when the item becomes valid.", - "rdfs:label": "validFrom" - }, - { - "@id": "schema:validThrough", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:PriceSpecification", - "schema:Demand", - "schema:OpeningHoursSpecification" - ], - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The end of the validity of offer, price specification, or opening hours data.", - "rdfs:label": "validThrough" - } - ] - }, - { - "@id": "schema:OwnershipInfo", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_GoodRelationsClass" - }, - "rdfs:comment": "A structured value providing information about when a certain organization or person owned a certain product.", - "rdfs:label": "OwnershipInfo", - "rdfs:subClassOf": "schema:StructuredValue", - "properties": [ - { - "@id": "schema:acquiredFrom", - "@type": "rdf:Property", - "domainIncludes": "schema:OwnershipInfo", - "rangeIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "The organization or person from which 
the product was acquired.", - "rdfs:label": "acquiredFrom" - }, - { - "@id": "schema:ownedFrom", - "@type": "rdf:Property", - "domainIncludes": "schema:OwnershipInfo", - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The date and time of obtaining the product.", - "rdfs:label": "ownedFrom" - }, - { - "@id": "schema:ownedThrough", - "@type": "rdf:Property", - "domainIncludes": "schema:OwnershipInfo", - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The date and time of giving up ownership on the product.", - "rdfs:label": "ownedThrough" - }, - { - "@id": "schema:typeOfGood", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:OwnershipInfo", - "schema:TypeAndQuantityNode" - ], - "rangeIncludes": "schema:Product", - "rdfs:comment": "The product that this structured value is referring to.", - "rdfs:label": "typeOfGood" - } - ] - }, - { - "@id": "schema:PriceSpecification", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_GoodRelationsClass" - }, - "rdfs:comment": "A structured value representing a monetary amount. 
Typically, only the subclasses of this type are used for markup.", - "rdfs:label": "PriceSpecification", - "rdfs:subClassOf": "schema:StructuredValue", - "children": [ - { - "@id": "schema:DeliveryChargeSpecification", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_GoodRelationsClass" - }, - "rdfs:comment": "The price for the delivery of an offer using a particular delivery method.", - "rdfs:label": "DeliveryChargeSpecification", - "rdfs:subClassOf": "schema:PriceSpecification", - "properties": [ - { - "@id": "schema:appliesToDeliveryMethod", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:DeliveryChargeSpecification", - "schema:PaymentChargeSpecification" - ], - "rangeIncludes": "schema:DeliveryMethod", - "rdfs:comment": "The delivery method(s) to which the delivery charge or payment charge specification applies.", - "rdfs:label": "appliesToDeliveryMethod" - }, - { - "@id": "schema:eligibleRegion", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:DeliveryChargeSpecification", - "schema:Demand" - ], - "rangeIncludes": [ - "schema:GeoShape", - "schema:Text" - ], - "rdfs:comment": "The ISO 3166-1 (ISO 3166-1 alpha-2) or ISO 3166-2 code, or the GeoShape for the geo-political region(s) for which the offer or delivery charge specification is valid.", - "rdfs:label": "eligibleRegion" - } - ] - }, - { - "@id": "schema:PaymentChargeSpecification", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_GoodRelationsClass" - }, - "rdfs:comment": "The costs of settling the payment using a particular payment method.", - "rdfs:label": "PaymentChargeSpecification", - "rdfs:subClassOf": "schema:PriceSpecification", - "properties": [ - { - "@id": "schema:appliesToDeliveryMethod", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:DeliveryChargeSpecification", - 
"schema:PaymentChargeSpecification" - ], - "rangeIncludes": "schema:DeliveryMethod", - "rdfs:comment": "The delivery method(s) to which the delivery charge or payment charge specification applies.", - "rdfs:label": "appliesToDeliveryMethod" - }, - { - "@id": "schema:appliesToPaymentMethod", - "@type": "rdf:Property", - "domainIncludes": "schema:PaymentChargeSpecification", - "rangeIncludes": "schema:PaymentMethod", - "rdfs:comment": "The payment method(s) to which the payment charge specification applies.", - "rdfs:label": "appliesToPaymentMethod" - } - ] - }, - { - "@id": "schema:UnitPriceSpecification", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_GoodRelationsClass" - }, - "rdfs:comment": "The price asked for a given offer by the respective organization or person.", - "rdfs:label": "UnitPriceSpecification", - "rdfs:subClassOf": "schema:PriceSpecification", - "properties": [ - { - "@id": "schema:billingIncrement", - "@type": "rdf:Property", - "domainIncludes": "schema:UnitPriceSpecification", - "rangeIncludes": "schema:Number", - "rdfs:comment": "This property specifies the minimal quantity and rounding increment that will be the basis for the billing. The unit of measurement is specified by the unitCode property.", - "rdfs:label": "billingIncrement" - }, - { - "@id": "schema:priceType", - "@type": "rdf:Property", - "domainIncludes": "schema:UnitPriceSpecification", - "rangeIncludes": "schema:Text", - "rdfs:comment": "A short text or acronym indicating multiple price specifications for the same offer, e.g. 
SRP for the suggested retail price or INVOICE for the invoice price, mostly used in the car industry.", - "rdfs:label": "priceType" - }, - { - "@id": "schema:unitCode", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:QuantitativeValue", - "schema:TypeAndQuantityNode", - "schema:UnitPriceSpecification" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The unit of measurement given using the UN/CEFACT Common Code (3 characters).", - "rdfs:label": "unitCode" - } - ] - } - ], - "properties": [ - { - "@id": "schema:eligibleQuantity", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:PriceSpecification", - "schema:Demand" - ], - "rangeIncludes": "schema:QuantitativeValue", - "rdfs:comment": "The interval and unit of measurement of ordering quantities for which the offer or price specification is valid. This allows e.g. specifying that a certain freight charge is valid only for a certain quantity.", - "rdfs:label": "eligibleQuantity" - }, - { - "@id": "schema:eligibleTransactionVolume", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:PriceSpecification", - "schema:Demand" - ], - "rangeIncludes": "schema:PriceSpecification", - "rdfs:comment": "The transaction volume, in a monetary unit, for which the offer or price specification is valid, e.g. 
for indicating a minimal purchasing volume, to express free shipping above a certain order volume, or to limit the acceptance of credit cards to purchases to a certain minimal amount.", - "rdfs:label": "eligibleTransactionVolume" - }, - { - "@id": "schema:maxPrice", - "@type": "rdf:Property", - "domainIncludes": "schema:PriceSpecification", - "rangeIncludes": "schema:Number", - "rdfs:comment": "The highest price if the price is a range.", - "rdfs:label": "maxPrice" - }, - { - "@id": "schema:minPrice", - "@type": "rdf:Property", - "domainIncludes": "schema:PriceSpecification", - "rangeIncludes": "schema:Number", - "rdfs:comment": "The lowest price if the price is a range.", - "rdfs:label": "minPrice" - }, - { - "@id": "schema:price", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:PriceSpecification", - "schema:TradeAction" - ], - "rangeIncludes": [ - "schema:Number", - "schema:Text" - ], - "rdfs:comment": "The offer price of a product, or of a price component when attached to PriceSpecification and its subtypes.", - "rdfs:label": "price" - }, - { - "@id": "schema:priceCurrency", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Reservation", - "schema:Ticket", - "schema:Offer", - "schema:PriceSpecification" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The currency (in 3-letter ISO 4217 format) of the price or a price component, when attached to PriceSpecification and its subtypes.", - "rdfs:label": "priceCurrency" - }, - { - "@id": "schema:validFrom", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:PriceSpecification", - "schema:Demand", - "schema:OpeningHoursSpecification", - "schema:Permit" - ], - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The date when the item becomes valid.", - "rdfs:label": "validFrom" - }, - { - "@id": "schema:validThrough", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:PriceSpecification", - "schema:Demand", - 
"schema:OpeningHoursSpecification" - ], - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The end of the validity of offer, price specification, or opening hours data.", - "rdfs:label": "validThrough" - }, - { - "@id": "schema:valueAddedTaxIncluded", - "@type": "rdf:Property", - "domainIncludes": "schema:PriceSpecification", - "rangeIncludes": "schema:Boolean", - "rdfs:comment": "Specifies whether the applicable value-added tax (VAT) is included in the price specification or not.", - "rdfs:label": "valueAddedTaxIncluded" - } - ] - }, - { - "@id": "schema:QuantitativeValue", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_GoodRelationsClass" - }, - "rdfs:comment": " A point value or interval for product characteristics and other purposes.", - "rdfs:label": "QuantitativeValue", - "rdfs:subClassOf": "schema:StructuredValue", - "properties": [ - { - "@id": "schema:maxValue", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:QuantitativeValue", - "schema:PropertyValueSpecification" - ], - "rangeIncludes": "schema:Number", - "rdfs:comment": "The upper value of some characteristic or property.", - "rdfs:label": "maxValue" - }, - { - "@id": "schema:minValue", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:QuantitativeValue", - "schema:PropertyValueSpecification" - ], - "rangeIncludes": "schema:Number", - "rdfs:comment": "The lower value of some characteristic or property.", - "rdfs:label": "minValue" - }, - { - "@id": "schema:unitCode", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:QuantitativeValue", - "schema:TypeAndQuantityNode", - "schema:UnitPriceSpecification" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The unit of measurement given using the UN/CEFACT Common Code (3 characters).", - "rdfs:label": "unitCode" - }, - { - "@id": "schema:value", - "@type": "rdf:Property", - "domainIncludes": "schema:QuantitativeValue", - 
"rangeIncludes": "schema:Number", - "rdfs:comment": "The value of the product characteristic.", - "rdfs:label": "value" - }, - { - "@id": "schema:valueReference", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:QualitativeValue", - "schema:QuantitativeValue" - ], - "rangeIncludes": [ - "schema:Enumeration", - "schema:StructuredValue" - ], - "rdfs:comment": "A pointer to a secondary value that provides additional information on the original value, e.g. a reference temperature.", - "rdfs:label": "valueReference" - } - ] - }, - { - "@id": "schema:TypeAndQuantityNode", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_GoodRelationsClass" - }, - "rdfs:comment": "A structured value indicating the quantity, unit of measurement, and business function of goods included in a bundle offer.", - "rdfs:label": "TypeAndQuantityNode", - "rdfs:subClassOf": "schema:StructuredValue", - "properties": [ - { - "@id": "schema:amountOfThisGood", - "@type": "rdf:Property", - "domainIncludes": "schema:TypeAndQuantityNode", - "rangeIncludes": "schema:Number", - "rdfs:comment": "The quantity of the goods included in the offer.", - "rdfs:label": "amountOfThisGood" - }, - { - "@id": "schema:businessFunction", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand", - "schema:TypeAndQuantityNode" - ], - "rangeIncludes": "schema:BusinessFunction", - "rdfs:comment": "The business function (e.g. sell, lease, repair, dispose) of the offer or component of a bundle (TypeAndQuantityNode). 
The default is http://purl.org/goodrelations/v1#Sell.", - "rdfs:label": "businessFunction" - }, - { - "@id": "schema:typeOfGood", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:OwnershipInfo", - "schema:TypeAndQuantityNode" - ], - "rangeIncludes": "schema:Product", - "rdfs:comment": "The product that this structured value is referring to.", - "rdfs:label": "typeOfGood" - }, - { - "@id": "schema:unitCode", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:QuantitativeValue", - "schema:TypeAndQuantityNode", - "schema:UnitPriceSpecification" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The unit of measurement given using the UN/CEFACT Common Code (3 characters).", - "rdfs:label": "unitCode" - } - ] - }, - { - "@id": "schema:WarrantyPromise", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_GoodRelationsClass" - }, - "rdfs:comment": "A structured value representing the duration and scope of services that will be provided to a customer free of charge in case of a defect or malfunction of a product.", - "rdfs:label": "WarrantyPromise", - "rdfs:subClassOf": "schema:StructuredValue", - "properties": [ - { - "@id": "schema:durationOfWarranty", - "@type": "rdf:Property", - "domainIncludes": "schema:WarrantyPromise", - "rangeIncludes": "schema:QuantitativeValue", - "rdfs:comment": "The duration of the warranty promise. 
Common unitCode values are ANN for year, MON for months, or DAY for days.", - "rdfs:label": "durationOfWarranty" - }, - { - "@id": "schema:warrantyScope", - "@type": "rdf:Property", - "domainIncludes": "schema:WarrantyPromise", - "rangeIncludes": "schema:WarrantyScope", - "rdfs:comment": "The scope of the warranty promise.", - "rdfs:label": "warrantyScope" - } - ] - } - ] - }, - { - "@id": "schema:Ticket", - "@type": "rdfs:Class", - "rdfs:comment": "Used to describe a ticket to an event, a flight, a bus ride, etc.", - "rdfs:label": "Ticket", - "rdfs:subClassOf": "schema:Intangible", - "properties": [ - { - "@id": "schema:dateIssued", - "@type": "rdf:Property", - "domainIncludes": "schema:Ticket", - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The date the ticket was issued.", - "rdfs:label": "dateIssued" - }, - { - "@id": "schema:issuedBy", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Ticket", - "schema:Permit" - ], - "rangeIncludes": "schema:Organization", - "rdfs:comment": "The organization issuing the ticket or permit.", - "rdfs:label": "issuedBy" - }, - { - "@id": "schema:priceCurrency", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Reservation", - "schema:Ticket", - "schema:Offer", - "schema:PriceSpecification" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The currency (in 3-letter ISO 4217 format) of the price or a price component, when attached to PriceSpecification and its subtypes.", - "rdfs:label": "priceCurrency" - }, - { - "@id": "schema:ticketNumber", - "@type": "rdf:Property", - "domainIncludes": "schema:Ticket", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The unique identifier for the ticket.", - "rdfs:label": "ticketNumber" - }, - { - "@id": "schema:ticketToken", - "@type": "rdf:Property", - "domainIncludes": "schema:Ticket", - "rangeIncludes": [ - "schema:Text", - "schema:URL" - ], - "rdfs:comment": "Reference to an asset (e.g., Barcode, QR code image or PDF) usable for entrance.", - 
"rdfs:label": "ticketToken" - }, - { - "@id": "schema:ticketedSeat", - "@type": "rdf:Property", - "domainIncludes": "schema:Ticket", - "rangeIncludes": "schema:Seat", - "rdfs:comment": "The seat associated with the ticket.", - "rdfs:label": "ticketedSeat" - }, - { - "@id": "schema:totalPrice", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Reservation", - "schema:Ticket" - ], - "rangeIncludes": [ - "schema:Number", - "schema:Text", - "schema:PriceSpecification" - ], - "rdfs:comment": "The total price for the reservation or ticket, including applicable taxes, shipping, etc.", - "rdfs:label": "totalPrice" - }, - { - "@id": "schema:underName", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Reservation", - "schema:Ticket" - ], - "rangeIncludes": [ - "schema:Person", - "schema:Organization" - ], - "rdfs:comment": "The person or organization the reservation or ticket is for.", - "rdfs:label": "underName" - } - ] - }, - { - "@id": "schema:TrainTrip", - "@type": "rdfs:Class", - "rdfs:comment": "A trip on a commercial train line.", - "rdfs:label": "Train Trip", - "rdfs:subClassOf": "schema:Intangible", - "properties": [ - { - "@id": "schema:arrivalPlatform", - "@type": "rdf:Property", - "domainIncludes": "schema:TrainTrip", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The platform where the train arrives.", - "rdfs:label": "arrivalPlatform" - }, - { - "@id": "schema:arrivalStation", - "@type": "rdf:Property", - "domainIncludes": "schema:TrainTrip", - "rangeIncludes": "schema:TrainStation", - "rdfs:comment": "The station where the train trip ends.", - "rdfs:label": "arrivalStation" - }, - { - "@id": "schema:arrivalTime", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:BusTrip", - "schema:Flight", - "schema:TrainTrip" - ], - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The expected arrival time.", - "rdfs:label": "arrivalTime" - }, - { - "@id": "schema:departurePlatform", - "@type": "rdf:Property", - "domainIncludes": 
"schema:TrainTrip", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The platform from which the train departs.", - "rdfs:label": "departurePlatform" - }, - { - "@id": "schema:departureStation", - "@type": "rdf:Property", - "domainIncludes": "schema:TrainTrip", - "rangeIncludes": "schema:TrainStation", - "rdfs:comment": "The station from which the train departs.", - "rdfs:label": "departureStation" - }, - { - "@id": "schema:departureTime", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:BusTrip", - "schema:Flight", - "schema:TrainTrip" - ], - "rangeIncludes": "schema:DateTime", - "rdfs:comment": "The expected departure time.", - "rdfs:label": "departureTime" - }, - { - "@id": "schema:provider", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Service", - "schema:Reservation", - "schema:Flight", - "schema:TrainTrip", - "schema:BusTrip" - ], - "rangeIncludes": [ - "schema:Person", - "schema:Organization" - ], - "rdfs:comment": "The person or organization providing the service, reservation, or creative work. The provider may subcontract out the service.", - "rdfs:label": "provider" - }, - { - "@id": "schema:trainName", - "@type": "rdf:Property", - "domainIncludes": "schema:TrainTrip", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The name of the train (e.g. 
The Orient Express).", - "rdfs:label": "trainName" - }, - { - "@id": "schema:trainNumber", - "@type": "rdf:Property", - "domainIncludes": "schema:TrainTrip", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The unique identifier for the train.", - "rdfs:label": "trainNumber" - } - ] - } - ] - }, - { - "@id": "schema:MedicalEntity", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "The most generic type of entity related to health and the practice of medicine.", - "rdfs:label": "MedicalEntity", - "rdfs:subClassOf": "schema:Thing", - "children": [ - { - "@id": "schema:AnatomicalStructure", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Any part of the human body, typically a component of an anatomical system. Organs, tissues, and cells are all anatomical structures.", - "rdfs:label": "AnatomicalStructure", - "rdfs:subClassOf": "schema:MedicalEntity", - "children": [ - { - "@id": "schema:Bone", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Rigid connective tissue that comprises up the skeletal structure of the human body.", - "rdfs:label": "Bone", - "rdfs:subClassOf": "schema:AnatomicalStructure" - }, - { - "@id": "schema:BrainStructure", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Any anatomical structure which pertains to the soft nervous tissue functioning as the coordinating center of sensation and intellectual and nervous activity.", - "rdfs:label": "BrainStructure", - "rdfs:subClassOf": "schema:AnatomicalStructure" - }, - { - "@id": "schema:Joint", - "@type": "rdfs:Class", - 
"http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "The anatomical location at which two or more bones make contact.", - "rdfs:label": "Joint", - "rdfs:subClassOf": "schema:AnatomicalStructure", - "properties": [ - { - "@id": "schema:biomechnicalClass", - "@type": "rdf:Property", - "domainIncludes": "schema:Joint", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The biomechanical properties of the bone.", - "rdfs:label": "biomechnicalClass" - }, - { - "@id": "schema:functionalClass", - "@type": "rdf:Property", - "domainIncludes": "schema:Joint", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The degree of mobility the joint allows.", - "rdfs:label": "functionalClass" - }, - { - "@id": "schema:structuralClass", - "@type": "rdf:Property", - "domainIncludes": "schema:Joint", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The name given to how bone physically connects to each other.", - "rdfs:label": "structuralClass" - } - ] - }, - { - "@id": "schema:Ligament", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A short band of tough, flexible, fibrous connective tissue that functions to connect multiple bones, cartilages, and structurally support joints.", - "rdfs:label": "Ligament", - "rdfs:subClassOf": "schema:AnatomicalStructure" - }, - { - "@id": "schema:Muscle", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A muscle is an anatomical structure consisting of a contractile form of tissue that animals use to effect movement.", - "rdfs:label": "Muscle", - "rdfs:subClassOf": "schema:AnatomicalStructure", - "properties": [ - { - "@id": "schema:action", - "@type": "rdf:Property", - "domainIncludes": "schema:Muscle", - 
"rangeIncludes": "schema:Text", - "rdfs:comment": "The movement the muscle generates.", - "rdfs:label": "action", - "supercededBy": "schema:muscleAction" - }, - { - "@id": "schema:antagonist", - "@type": "rdf:Property", - "domainIncludes": "schema:Muscle", - "rangeIncludes": "schema:Muscle", - "rdfs:comment": "The muscle whose action counteracts the specified muscle.", - "rdfs:label": "antagonist" - }, - { - "@id": "schema:bloodSupply", - "@type": "rdf:Property", - "domainIncludes": "schema:Muscle", - "rangeIncludes": "schema:Vessel", - "rdfs:comment": "The blood vessel that carries blood from the heart to the muscle.", - "rdfs:label": "bloodSupply" - }, - { - "@id": "schema:insertion", - "@type": "rdf:Property", - "domainIncludes": "schema:Muscle", - "rangeIncludes": "schema:AnatomicalStructure", - "rdfs:comment": "The place of attachment of a muscle, or what the muscle moves.", - "rdfs:label": "insertion" - }, - { - "@id": "schema:muscleAction", - "@type": "rdf:Property", - "domainIncludes": "schema:Muscle", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The movement the muscle generates.", - "rdfs:label": "muscleAction" - }, - { - "@id": "schema:nerve", - "@type": "rdf:Property", - "domainIncludes": "schema:Muscle", - "rangeIncludes": "schema:Nerve", - "rdfs:comment": "The underlying innervation associated with the muscle.", - "rdfs:label": "nerve" - }, - { - "@id": "schema:origin", - "@type": "rdf:Property", - "domainIncludes": "schema:Muscle", - "rangeIncludes": "schema:AnatomicalStructure", - "rdfs:comment": "The place or point where a muscle arises.", - "rdfs:label": "origin" - } - ] - }, - { - "@id": "schema:Nerve", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A common pathway for the electrochemical nerve impulses that are transmitted along each of the axons.", - "rdfs:label": "Nerve", - "rdfs:subClassOf": 
"schema:AnatomicalStructure", - "properties": [ - { - "@id": "schema:branch", - "@type": "rdf:Property", - "domainIncludes": "schema:Nerve", - "rangeIncludes": [ - "schema:AnatomicalStructure", - "schema:Nerve" - ], - "rdfs:comment": "The branches that delineate from the nerve bundle.", - "rdfs:label": "branch" - }, - { - "@id": "schema:nerveMotor", - "@type": "rdf:Property", - "domainIncludes": "schema:Nerve", - "rangeIncludes": "schema:Muscle", - "rdfs:comment": "The neurological pathway extension that involves muscle control.", - "rdfs:label": "nerveMotor" - }, - { - "@id": "schema:sensoryUnit", - "@type": "rdf:Property", - "domainIncludes": "schema:Nerve", - "rangeIncludes": [ - "schema:AnatomicalStructure", - "schema:SuperficialAnatomy" - ], - "rdfs:comment": "The neurological pathway extension that inputs and sends information to the brain or spinal cord.", - "rdfs:label": "sensoryUnit" - }, - { - "@id": "schema:sourcedFrom", - "@type": "rdf:Property", - "domainIncludes": "schema:Nerve", - "rangeIncludes": "schema:BrainStructure", - "rdfs:comment": "The neurological pathway that originates the neurons.", - "rdfs:label": "sourcedFrom" - } - ] - }, - { - "@id": "schema:Vessel", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A component of the human body circulatory system comprised of an intricate network of hollow tubes that transport blood throughout the entire body.", - "rdfs:label": "Vessel", - "rdfs:subClassOf": "schema:AnatomicalStructure", - "children": [ - { - "@id": "schema:Artery", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A type of blood vessel that specifically carries blood away from the heart.", - "rdfs:label": "Artery", - "rdfs:subClassOf": "schema:Vessel", - "properties": [ - { - "@id": 
"schema:arterialBranch", - "@type": "rdf:Property", - "domainIncludes": "schema:Artery", - "rangeIncludes": "schema:AnatomicalStructure", - "rdfs:comment": "The branches that comprise the arterial structure.", - "rdfs:label": "arterialBranch" - }, - { - "@id": "schema:source", - "@type": "rdf:Property", - "domainIncludes": "schema:Artery", - "rangeIncludes": "schema:AnatomicalStructure", - "rdfs:comment": "The anatomical or organ system that the artery originates from.", - "rdfs:label": "source" - }, - { - "@id": "schema:supplyTo", - "@type": "rdf:Property", - "domainIncludes": "schema:Artery", - "rangeIncludes": "schema:AnatomicalStructure", - "rdfs:comment": "The area to which the artery supplies blood.", - "rdfs:label": "supplyTo" - } - ] - }, - { - "@id": "schema:LymphaticVessel", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A type of blood vessel that specifically carries lymph fluid unidirectionally toward the heart.", - "rdfs:label": "LymphaticVessel", - "rdfs:subClassOf": "schema:Vessel", - "properties": [ - { - "@id": "schema:originatesFrom", - "@type": "rdf:Property", - "domainIncludes": "schema:LymphaticVessel", - "rangeIncludes": "schema:Vessel", - "rdfs:comment": "The vasculature the lymphatic structure originates, or afferents, from.", - "rdfs:label": "originatesFrom" - }, - { - "@id": "schema:regionDrained", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:LymphaticVessel", - "schema:Vein" - ], - "rangeIncludes": [ - "schema:AnatomicalStructure", - "schema:AnatomicalSystem" - ], - "rdfs:comment": "The anatomical or organ system drained by this vessel; generally refers to a specific part of an organ.", - "rdfs:label": "regionDrained" - }, - { - "@id": "schema:runsTo", - "@type": "rdf:Property", - "domainIncludes": "schema:LymphaticVessel", - "rangeIncludes": "schema:Vessel", - "rdfs:comment": "The vasculature the 
lymphatic structure runs, or efferents, to.", - "rdfs:label": "runsTo" - } - ] - }, - { - "@id": "schema:Vein", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A type of blood vessel that specifically carries blood to the heart.", - "rdfs:label": "Vein", - "rdfs:subClassOf": "schema:Vessel", - "properties": [ - { - "@id": "schema:drainsTo", - "@type": "rdf:Property", - "domainIncludes": "schema:Vein", - "rangeIncludes": "schema:Vessel", - "rdfs:comment": "The vasculature that the vein drains into.", - "rdfs:label": "drainsTo" - }, - { - "@id": "schema:regionDrained", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:LymphaticVessel", - "schema:Vein" - ], - "rangeIncludes": [ - "schema:AnatomicalStructure", - "schema:AnatomicalSystem" - ], - "rdfs:comment": "The anatomical or organ system drained by this vessel; generally refers to a specific part of an organ.", - "rdfs:label": "regionDrained" - }, - { - "@id": "schema:tributary", - "@type": "rdf:Property", - "domainIncludes": "schema:Vein", - "rangeIncludes": "schema:AnatomicalStructure", - "rdfs:comment": "The anatomical or organ system that the vein flows into; a larger structure that the vein connects to.", - "rdfs:label": "tributary" - } - ] - } - ] - } - ], - "properties": [ - { - "@id": "schema:associatedPathophysiology", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:AnatomicalStructure", - "schema:AnatomicalSystem", - "schema:SuperficialAnatomy" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "If applicable, a description of the pathophysiology associated with the anatomical system, including potential abnormal changes in the mechanical, physical, and biochemical functions of the system.", - "rdfs:label": "associatedPathophysiology" - }, - { - "@id": "schema:bodyLocation", - "@type": "rdf:Property", - "domainIncludes": "schema:AnatomicalStructure", - 
"rangeIncludes": "schema:Text", - "rdfs:comment": "Location in the body of the anatomical structure.", - "rdfs:label": "bodyLocation" - }, - { - "@id": "schema:connectedTo", - "@type": "rdf:Property", - "domainIncludes": "schema:AnatomicalStructure", - "rangeIncludes": "schema:AnatomicalStructure", - "rdfs:comment": "Other anatomical structures to which this structure is connected.", - "rdfs:label": "connectedTo" - }, - { - "@id": "schema:diagram", - "@type": "rdf:Property", - "domainIncludes": "schema:AnatomicalStructure", - "rangeIncludes": "schema:ImageObject", - "rdfs:comment": "An image containing a diagram that illustrates the structure and/or its component substructures and/or connections with other structures.", - "rdfs:label": "diagram" - }, - { - "@id": "schema:function", - "@type": "rdf:Property", - "domainIncludes": "schema:AnatomicalStructure", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Function of the anatomical structure.", - "rdfs:label": "function" - }, - { - "@id": "schema:partOfSystem", - "@type": "rdf:Property", - "domainIncludes": "schema:AnatomicalStructure", - "rangeIncludes": "schema:AnatomicalSystem", - "rdfs:comment": "The anatomical or organ system that this structure is part of.", - "rdfs:label": "partOfSystem" - }, - { - "@id": "schema:relatedCondition", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:AnatomicalStructure", - "schema:AnatomicalSystem", - "schema:SuperficialAnatomy" - ], - "rangeIncludes": "schema:MedicalCondition", - "rdfs:comment": "A medical condition associated with this anatomy.", - "rdfs:label": "relatedCondition" - }, - { - "@id": "schema:relatedTherapy", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:AnatomicalStructure", - "schema:AnatomicalSystem", - "schema:SuperficialAnatomy" - ], - "rangeIncludes": "schema:MedicalTherapy", - "rdfs:comment": "A medical therapy related to this anatomy.", - "rdfs:label": "relatedTherapy" - }, - { - "@id": "schema:subStructure", - "@type": 
"rdf:Property", - "domainIncludes": "schema:AnatomicalStructure", - "rangeIncludes": "schema:AnatomicalStructure", - "rdfs:comment": "Component (sub-)structure(s) that comprise this anatomical structure.", - "rdfs:label": "subStructure" - } - ] - }, - { - "@id": "schema:AnatomicalSystem", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "An anatomical system is a group of anatomical structures that work together to perform a certain task. Anatomical systems, such as organ systems, are one organizing principle of anatomy, and can includes circulatory, digestive, endocrine, integumentary, immune, lymphatic, muscular, nervous, reproductive, respiratory, skeletal, urinary, vestibular, and other systems.", - "rdfs:label": "AnatomicalSystem", - "rdfs:subClassOf": "schema:MedicalEntity", - "properties": [ - { - "@id": "schema:associatedPathophysiology", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:AnatomicalStructure", - "schema:AnatomicalSystem", - "schema:SuperficialAnatomy" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "If applicable, a description of the pathophysiology associated with the anatomical system, including potential abnormal changes in the mechanical, physical, and biochemical functions of the system.", - "rdfs:label": "associatedPathophysiology" - }, - { - "@id": "schema:comprisedOf", - "@type": "rdf:Property", - "domainIncludes": "schema:AnatomicalSystem", - "rangeIncludes": [ - "schema:AnatomicalStructure", - "schema:AnatomicalSystem" - ], - "rdfs:comment": "The underlying anatomical structures, such as organs, that comprise the anatomical system.", - "rdfs:label": "comprisedOf" - }, - { - "@id": "schema:relatedCondition", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:AnatomicalStructure", - "schema:AnatomicalSystem", - "schema:SuperficialAnatomy" - ], - "rangeIncludes": "schema:MedicalCondition", - 
"rdfs:comment": "A medical condition associated with this anatomy.", - "rdfs:label": "relatedCondition" - }, - { - "@id": "schema:relatedStructure", - "@type": "rdf:Property", - "domainIncludes": "schema:AnatomicalSystem", - "rangeIncludes": "schema:AnatomicalStructure", - "rdfs:comment": "Related anatomical structure(s) that are not part of the system but relate or connect to it, such as vascular bundles associated with an organ system.", - "rdfs:label": "relatedStructure" - }, - { - "@id": "schema:relatedTherapy", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:AnatomicalStructure", - "schema:AnatomicalSystem", - "schema:SuperficialAnatomy" - ], - "rangeIncludes": "schema:MedicalTherapy", - "rdfs:comment": "A medical therapy related to this anatomy.", - "rdfs:label": "relatedTherapy" - } - ] - }, - { - "@id": "schema:MedicalCause", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "The causative agent(s) that are responsible for the pathophysiologic process that eventually results in a medical condition, symptom or sign. In this schema, unless otherwise specified this is meant to be the proximate cause of the medical condition, symptom or sign. The proximate cause is defined as the causative agent that most directly results in the medical condition, symptom or sign. For example, the HIV virus could be considered a cause of AIDS. Or in a diagnostic context, if a patient fell and sustained a hip fracture and two days later sustained a pulmonary embolism which eventuated in a cardiac arrest, the cause of the cardiac arrest (the proximate cause) would be the pulmonary embolism and not the fall.

Medical causes can include cardiovascular, chemical, dermatologic, endocrine, environmental, gastroenterologic, genetic, hematologic, gynecologic, iatrogenic, infectious, musculoskeletal, neurologic, nutritional, obstetric, oncologic, otolaryngologic, pharmacologic, psychiatric, pulmonary, renal, rheumatologic, toxic, traumatic, or urologic causes; medical conditions can be causes as well.", - "rdfs:label": "MedicalCause", - "rdfs:subClassOf": "schema:MedicalEntity", - "properties": [ - { - "@id": "schema:causeOf", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalCause", - "rangeIncludes": "schema:MedicalEntity", - "rdfs:comment": "The condition, complication, symptom, sign, etc. caused.", - "rdfs:label": "causeOf" - } - ] - }, - { - "@id": "schema:MedicalCondition", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Any condition of the human body that affects the normal functioning of a person, whether physically or mentally. Includes diseases, injuries, disabilities, disorders, syndromes, etc.", - "rdfs:label": "MedicalCondition", - "rdfs:subClassOf": "schema:MedicalEntity", - "children": [ - { - "@id": "schema:InfectiousDisease", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "An infectious disease is a clinically evident human disease resulting from the presence of pathogenic microbial agents, like pathogenic viruses, pathogenic bacteria, fungi, protozoa, multicellular parasites, and prions. 
To be considered an infectious disease, such pathogens are known to be able to cause this disease.", - "rdfs:label": "InfectiousDisease", - "rdfs:subClassOf": "schema:MedicalCondition", - "properties": [ - { - "@id": "schema:infectiousAgent", - "@type": "rdf:Property", - "domainIncludes": "schema:InfectiousDisease", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The actual infectious agent, such as a specific bacterium.", - "rdfs:label": "infectiousAgent" - }, - { - "@id": "schema:infectiousAgentClass", - "@type": "rdf:Property", - "domainIncludes": "schema:InfectiousDisease", - "rangeIncludes": "schema:InfectiousAgentClass", - "rdfs:comment": "The class of infectious agent (bacteria, prion, etc.) that causes the disease.", - "rdfs:label": "infectiousAgentClass" - }, - { - "@id": "schema:transmissionMethod", - "@type": "rdf:Property", - "domainIncludes": "schema:InfectiousDisease", - "rangeIncludes": "schema:Text", - "rdfs:comment": "How the disease spreads, either as a route or vector, for example 'direct contact', 'Aedes aegypti', etc.", - "rdfs:label": "transmissionMethod" - } - ] - } - ], - "properties": [ - { - "@id": "schema:associatedAnatomy", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:PhysicalActivity", - "schema:MedicalCondition" - ], - "rangeIncludes": [ - "schema:AnatomicalStructure", - "schema:AnatomicalSystem", - "schema:SuperficialAnatomy" - ], - "rdfs:comment": "The anatomy of the underlying organ system or structures associated with this entity.", - "rdfs:label": "associatedAnatomy" - }, - { - "@id": "schema:cause", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MedicalCondition", - "schema:MedicalSignOrSymptom" - ], - "rangeIncludes": "schema:MedicalCause", - "rdfs:comment": "An underlying cause. 
More specifically, one of the causative agent(s) that are most directly responsible for the pathophysiologic process that eventually results in the occurrence.", - "rdfs:label": "cause" - }, - { - "@id": "schema:differentialDiagnosis", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalCondition", - "rangeIncludes": "schema:DDxElement", - "rdfs:comment": "One of a set of differential diagnoses for the condition. Specifically, a closely-related or competing diagnosis typically considered later in the cognitive process whereby this medical condition is distinguished from others most likely responsible for a similar collection of signs and symptoms to reach the most parsimonious diagnosis or diagnoses in a patient.", - "rdfs:label": "differentialDiagnosis" - }, - { - "@id": "schema:epidemiology", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:PhysicalActivity", - "schema:MedicalCondition" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The characteristics of associated patients, such as age, gender, race etc.", - "rdfs:label": "epidemiology" - }, - { - "@id": "schema:expectedPrognosis", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalCondition", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The likely outcome in either the short term or long term of the medical condition.", - "rdfs:label": "expectedPrognosis" - }, - { - "@id": "schema:naturalProgression", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalCondition", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The expected progression of the condition if it is not treated and allowed to progress naturally.", - "rdfs:label": "naturalProgression" - }, - { - "@id": "schema:pathophysiology", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:PhysicalActivity", - "schema:MedicalCondition" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "Changes in the normal mechanical, physical, and biochemical functions that are associated with 
this activity or condition.", - "rdfs:label": "pathophysiology" - }, - { - "@id": "schema:possibleComplication", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalCondition", - "rangeIncludes": "schema:Text", - "rdfs:comment": "A possible unexpected and unfavorable evolution of a medical condition. Complications may include worsening of the signs or symptoms of the disease, extension of the condition to other organ systems, etc.", - "rdfs:label": "possibleComplication" - }, - { - "@id": "schema:possibleTreatment", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MedicalCondition", - "schema:MedicalSignOrSymptom" - ], - "rangeIncludes": "schema:MedicalTherapy", - "rdfs:comment": "A possible treatment to address this condition, sign or symptom.", - "rdfs:label": "possibleTreatment" - }, - { - "@id": "schema:primaryPrevention", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalCondition", - "rangeIncludes": "schema:MedicalTherapy", - "rdfs:comment": "A preventative therapy used to prevent an initial occurrence of the medical condition, such as vaccination.", - "rdfs:label": "primaryPrevention" - }, - { - "@id": "schema:riskFactor", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalCondition", - "rangeIncludes": "schema:MedicalRiskFactor", - "rdfs:comment": "A modifiable or non-modifiable factor that increases the risk of a patient contracting this condition, e.g. 
age, coexisting condition.", - "rdfs:label": "riskFactor" - }, - { - "@id": "schema:secondaryPrevention", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalCondition", - "rangeIncludes": "schema:MedicalTherapy", - "rdfs:comment": "A preventative therapy used to prevent reoccurrence of the medical condition after an initial episode of the condition.", - "rdfs:label": "secondaryPrevention" - }, - { - "@id": "schema:signOrSymptom", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalCondition", - "rangeIncludes": "schema:MedicalSignOrSymptom", - "rdfs:comment": "A sign or symptom of this condition. Signs are objective or physically observable manifestations of the medical condition while symptoms are the subjective experience of the medical condition.", - "rdfs:label": "signOrSymptom" - }, - { - "@id": "schema:stage", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalCondition", - "rangeIncludes": "schema:MedicalConditionStage", - "rdfs:comment": "The stage of the condition, if applicable.", - "rdfs:label": "stage" - }, - { - "@id": "schema:subtype", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalCondition", - "rangeIncludes": "schema:Text", - "rdfs:comment": "A more specific type of the condition, where applicable, for example 'Type 1 Diabetes', 'Type 2 Diabetes', or 'Gestational Diabetes' for Diabetes.", - "rdfs:label": "subtype" - }, - { - "@id": "schema:typicalTest", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalCondition", - "rangeIncludes": "schema:MedicalTest", - "rdfs:comment": "A medical test typically performed given this condition.", - "rdfs:label": "typicalTest" - } - ] - }, - { - "@id": "schema:MedicalContraindication", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A condition or factor that serves as a reason to withhold a certain medical therapy. 
Contraindications can be absolute (there are no reasonable circumstances for undertaking a course of action) or relative (the patient is at higher risk of complications, but that these risks may be outweighed by other considerations or mitigated by other measures).", - "rdfs:label": "MedicalContraindication", - "rdfs:subClassOf": "schema:MedicalEntity" - }, - { - "@id": "schema:MedicalDevice", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Any object used in a medical capacity, such as to diagnose or treat a patient.", - "rdfs:label": "MedicalDevice", - "rdfs:subClassOf": "schema:MedicalEntity", - "properties": [ - { - "@id": "schema:adverseOutcome", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MedicalTherapy", - "schema:MedicalDevice" - ], - "rangeIncludes": "schema:MedicalEntity", - "rdfs:comment": "A possible complication and/or side effect of this therapy. If it is known that an adverse outcome is serious (resulting in death, disability, or permanent damage; requiring hospitalization; or is otherwise life-threatening or requires immediate medical attention), tag it as a seriouseAdverseOutcome instead.", - "rdfs:label": "adverseOutcome" - }, - { - "@id": "schema:contraindication", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MedicalTherapy", - "schema:MedicalDevice" - ], - "rangeIncludes": "schema:MedicalContraindication", - "rdfs:comment": "A contraindication for this therapy.", - "rdfs:label": "contraindication" - }, - { - "@id": "schema:indication", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MedicalTherapy", - "schema:MedicalDevice" - ], - "rangeIncludes": "schema:MedicalIndication", - "rdfs:comment": "A factor that indicates use of this therapy for treatment and/or prevention of a condition, symptom, etc. 
For therapies such as drugs, indications can include both officially-approved indications as well as off-label uses. These can be distinguished by using the ApprovedIndication subtype of MedicalIndication.", - "rdfs:label": "indication" - }, - { - "@id": "schema:postOp", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalDevice", - "rangeIncludes": "schema:Text", - "rdfs:comment": "A description of the postoperative procedures, care, and/or followups for this device.", - "rdfs:label": "postOp" - }, - { - "@id": "schema:preOp", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalDevice", - "rangeIncludes": "schema:Text", - "rdfs:comment": "A description of the workup, testing, and other preparations required before implanting this device.", - "rdfs:label": "preOp" - }, - { - "@id": "schema:procedure", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalDevice", - "rangeIncludes": "schema:Text", - "rdfs:comment": "A description of the procedure involved in setting up, using, and/or installing the device.", - "rdfs:label": "procedure" - }, - { - "@id": "schema:purpose", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MedicalDevice", - "schema:AllocateAction", - "schema:PayAction" - ], - "rangeIncludes": [ - "schema:MedicalDevicePurpose", - "schema:Thing" - ], - "rdfs:comment": "A goal towards an action is taken. Can be concrete or abstract.", - "rdfs:label": "purpose" - }, - { - "@id": "schema:seriousAdverseOutcome", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MedicalTherapy", - "schema:MedicalDevice" - ], - "rangeIncludes": "schema:MedicalEntity", - "rdfs:comment": "A possible serious complication and/or serious side effect of this therapy. 
Serious adverse outcomes include those that are life-threatening; result in death, disability, or permanent damage; require hospitalization or prolong existing hospitalization; cause congenital anomalies or birth defects; or jeopardize the patient and may require medical or surgical intervention to prevent one of the outcomes in this definition.", - "rdfs:label": "seriousAdverseOutcome" - } - ] - }, - { - "@id": "schema:MedicalGuideline", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Any recommendation made by a standard society (e.g. ACC/AHA) or consensus statement that denotes how to diagnose and treat a particular condition. Note: this type should be used to tag the actual guideline recommendation; if the guideline recommendation occurs in a larger scholarly article, use MedicalScholarlyArticle to tag the overall article, not this type. Note also: the organization making the recommendation should be captured in the recognizingAuthority base property of MedicalEntity.", - "rdfs:label": "MedicalGuideline", - "rdfs:subClassOf": "schema:MedicalEntity", - "children": [ - { - "@id": "schema:MedicalGuidelineContraindication", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A guideline contraindication that designates a process as harmful and where quality of the data supporting the contraindication is sound.", - "rdfs:label": "MedicalGuidelineContraindication", - "rdfs:subClassOf": "schema:MedicalGuideline" - }, - { - "@id": "schema:MedicalGuidelineRecommendation", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A guideline recommendation that is regarded as efficacious and where quality of the data supporting the 
recommendation is sound.", - "rdfs:label": "MedicalGuidelineRecommendation", - "rdfs:subClassOf": "schema:MedicalGuideline", - "properties": [ - { - "@id": "schema:recommendationStrength", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalGuidelineRecommendation", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Strength of the guideline's recommendation (e.g. 'class I').", - "rdfs:label": "recommendationStrength" - } - ] - } - ], - "properties": [ - { - "@id": "schema:evidenceLevel", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalGuideline", - "rangeIncludes": "schema:MedicalEvidenceLevel", - "rdfs:comment": "Strength of evidence of the data used to formulate the guideline (enumerated).", - "rdfs:label": "evidenceLevel" - }, - { - "@id": "schema:evidenceOrigin", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalGuideline", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Source of the data used to formulate the guidance, e.g. RCT, consensus opinion, etc.", - "rdfs:label": "evidenceOrigin" - }, - { - "@id": "schema:guidelineDate", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalGuideline", - "rangeIncludes": "schema:Date", - "rdfs:comment": "Date on which this guideline's recommendation was made.", - "rdfs:label": "guidelineDate" - }, - { - "@id": "schema:guidelineSubject", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalGuideline", - "rangeIncludes": "schema:MedicalEntity", - "rdfs:comment": "The medical conditions, treatments, etc. 
that are the subject of the guideline.", - "rdfs:label": "guidelineSubject" - } - ] - }, - { - "@id": "schema:MedicalIndication", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A condition or factor that indicates use of a medical therapy, including signs, symptoms, risk factors, anatomical states, etc.", - "rdfs:label": "MedicalIndication", - "rdfs:subClassOf": "schema:MedicalEntity", - "children": [ - { - "@id": "schema:ApprovedIndication", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "An indication for a medical therapy that has been formally specified or approved by a regulatory body that regulates use of the therapy; for example, the US FDA approves indications for most drugs in the US.", - "rdfs:label": "ApprovedIndication", - "rdfs:subClassOf": "schema:MedicalIndication" - }, - { - "@id": "schema:PreventionIndication", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "An indication for preventing an underlying condition, symptom, etc.", - "rdfs:label": "PreventionIndication", - "rdfs:subClassOf": "schema:MedicalIndication" - }, - { - "@id": "schema:TreatmentIndication", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "An indication for treating an underlying condition, symptom, etc.", - "rdfs:label": "TreatmentIndication", - "rdfs:subClassOf": "schema:MedicalIndication" - } - ] - }, - { - "@id": "schema:MedicalIntangible", - "@type": "rdfs:Class", - "rdfs:comment": "A utility class that serves as the umbrella for a number of 'intangible' things in the medical space.", - "rdfs:label": 
"MedicalIntangible", - "rdfs:subClassOf": "schema:MedicalEntity", - "children": [ - { - "@id": "schema:DDxElement", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "An alternative, closely-related condition typically considered later in the differential diagnosis process along with the signs that are used to distinguish it.", - "rdfs:label": "DDxElement", - "rdfs:subClassOf": "schema:MedicalIntangible", - "properties": [ - { - "@id": "schema:diagnosis", - "@type": "rdf:Property", - "domainIncludes": "schema:DDxElement", - "rangeIncludes": "schema:MedicalCondition", - "rdfs:comment": "One or more alternative conditions considered in the differential diagnosis process.", - "rdfs:label": "diagnosis" - }, - { - "@id": "schema:distinguishingSign", - "@type": "rdf:Property", - "domainIncludes": "schema:DDxElement", - "rangeIncludes": "schema:MedicalSignOrSymptom", - "rdfs:comment": "One of a set of signs and symptoms that can be used to distinguish this diagnosis from others in the differential diagnosis.", - "rdfs:label": "distinguishingSign" - } - ] - }, - { - "@id": "schema:DoseSchedule", - "@type": "rdfs:Class", - "rdfs:comment": "A specific dosing schedule for a drug or supplement.", - "rdfs:label": "DoseSchedule", - "rdfs:subClassOf": "schema:MedicalIntangible", - "children": [ - { - "@id": "schema:MaximumDoseSchedule", - "@type": "rdfs:Class", - "rdfs:comment": "The maximum dosing schedule considered safe for a drug or supplement as recommended by an authority or by the drug/supplement's manufacturer. 
Capture the recommending authority in the recognizingAuthority property of MedicalEntity.", - "rdfs:label": "MaximumDoseSchedule", - "rdfs:subClassOf": "schema:DoseSchedule" - }, - { - "@id": "schema:RecommendedDoseSchedule", - "@type": "rdfs:Class", - "rdfs:comment": "A recommended dosing schedule for a drug or supplement as prescribed or recommended by an authority or by the drug/supplement's manufacturer. Capture the recommending authority in the recognizingAuthority property of MedicalEntity.", - "rdfs:label": "RecommendedDoseSchedule", - "rdfs:subClassOf": "schema:DoseSchedule" - }, - { - "@id": "schema:ReportedDoseSchedule", - "@type": "rdfs:Class", - "rdfs:comment": "A patient-reported or observed dosing schedule for a drug or supplement.", - "rdfs:label": "ReportedDoseSchedule", - "rdfs:subClassOf": "schema:DoseSchedule" - } - ], - "properties": [ - { - "@id": "schema:doseUnit", - "@type": "rdf:Property", - "domainIncludes": "schema:DoseSchedule", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The unit of the dose, e.g. 'mg'.", - "rdfs:label": "doseUnit" - }, - { - "@id": "schema:doseValue", - "@type": "rdf:Property", - "domainIncludes": "schema:DoseSchedule", - "rangeIncludes": "schema:Number", - "rdfs:comment": "The value of the dose, e.g. 500.", - "rdfs:label": "doseValue" - }, - { - "@id": "schema:frequency", - "@type": "rdf:Property", - "domainIncludes": "schema:DoseSchedule", - "rangeIncludes": "schema:Text", - "rdfs:comment": "How often the dose is taken, e.g. 'daily'.", - "rdfs:label": "frequency" - }, - { - "@id": "schema:targetPopulation", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:DietarySupplement", - "schema:DoseSchedule" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "Characteristics of the population for which this is intended, or which typically uses it, e.g. 
'adults'.", - "rdfs:label": "targetPopulation" - } - ] - }, - { - "@id": "schema:DrugCost", - "@type": "rdfs:Class", - "rdfs:comment": "The cost per unit of a medical drug. Note that this type is not meant to represent the price in an offer of a drug for sale; see the Offer type for that. This type will typically be used to tag wholesale or average retail cost of a drug, or maximum reimbursable cost. Costs of medical drugs vary widely depending on how and where they are paid for, so while this type captures some of the variables, costs should be used with caution by consumers of this schema's markup.", - "rdfs:label": "DrugCost", - "rdfs:subClassOf": "schema:MedicalIntangible", - "properties": [ - { - "@id": "schema:applicableLocation", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:DrugCost", - "schema:DrugLegalStatus" - ], - "rangeIncludes": "schema:AdministrativeArea", - "rdfs:comment": "The location in which the status applies.", - "rdfs:label": "applicableLocation" - }, - { - "@id": "schema:costCategory", - "@type": "rdf:Property", - "domainIncludes": "schema:DrugCost", - "rangeIncludes": "schema:DrugCostCategory", - "rdfs:comment": "The category of cost, such as wholesale, retail, reimbursement cap, etc.", - "rdfs:label": "costCategory" - }, - { - "@id": "schema:costCurrency", - "@type": "rdf:Property", - "domainIncludes": "schema:DrugCost", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The currency (in 3-letter ISO 4217 format) of the drug cost.", - "rdfs:label": "costCurrency" - }, - { - "@id": "schema:costOrigin", - "@type": "rdf:Property", - "domainIncludes": "schema:DrugCost", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Additional details to capture the origin of the cost data. 
For example, 'Medicare Part B'.", - "rdfs:label": "costOrigin" - }, - { - "@id": "schema:costPerUnit", - "@type": "rdf:Property", - "domainIncludes": "schema:DrugCost", - "rangeIncludes": [ - "schema:Number", - "schema:Text" - ], - "rdfs:comment": "The cost per unit of the drug.", - "rdfs:label": "costPerUnit" - }, - { - "@id": "schema:drugUnit", - "@type": "rdf:Property", - "domainIncludes": "schema:DrugCost", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The unit in which the drug is measured, e.g. '5 mg tablet'.", - "rdfs:label": "drugUnit" - } - ] - }, - { - "@id": "schema:DrugLegalStatus", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "The legal availability status of a medical drug.", - "rdfs:label": "DrugLegalStatus", - "rdfs:subClassOf": "schema:MedicalIntangible", - "properties": [ - { - "@id": "schema:applicableLocation", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:DrugCost", - "schema:DrugLegalStatus" - ], - "rangeIncludes": "schema:AdministrativeArea", - "rdfs:comment": "The location in which the status applies.", - "rdfs:label": "applicableLocation" - } - ] - }, - { - "@id": "schema:DrugStrength", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A specific strength in which a medical drug is available in a specific country.", - "rdfs:label": "DrugStrength", - "rdfs:subClassOf": "schema:MedicalIntangible", - "properties": [ - { - "@id": "schema:activeIngredient", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:DietarySupplement", - "schema:Drug", - "schema:DrugStrength" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "An active ingredient, typically chemical compounds and/or biologic substances.", - "rdfs:label": "activeIngredient" - }, - { - "@id": "schema:availableIn", - 
"@type": "rdf:Property", - "domainIncludes": "schema:DrugStrength", - "rangeIncludes": "schema:AdministrativeArea", - "rdfs:comment": "The location in which the strength is available.", - "rdfs:label": "availableIn" - }, - { - "@id": "schema:strengthUnit", - "@type": "rdf:Property", - "domainIncludes": "schema:DrugStrength", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The units of an active ingredient's strength, e.g. mg.", - "rdfs:label": "strengthUnit" - }, - { - "@id": "schema:strengthValue", - "@type": "rdf:Property", - "domainIncludes": "schema:DrugStrength", - "rangeIncludes": "schema:Number", - "rdfs:comment": "The value of an active ingredient's strength, e.g. 325.", - "rdfs:label": "strengthValue" - } - ] - }, - { - "@id": "schema:MedicalCode", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A code for a medical entity.", - "rdfs:label": "MedicalCode", - "rdfs:subClassOf": "schema:MedicalIntangible", - "properties": [ - { - "@id": "schema:codeValue", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalCode", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The actual code.", - "rdfs:label": "codeValue" - }, - { - "@id": "schema:codingSystem", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalCode", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The coding system, e.g. 
'ICD-10'.", - "rdfs:label": "codingSystem" - } - ] - }, - { - "@id": "schema:MedicalConditionStage", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A stage of a medical condition, such as 'Stage IIIa'.", - "rdfs:label": "MedicalConditionStage", - "rdfs:subClassOf": "schema:MedicalIntangible", - "properties": [ - { - "@id": "schema:stageAsNumber", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalConditionStage", - "rangeIncludes": "schema:Number", - "rdfs:comment": "The stage represented as a number, e.g. 3.", - "rdfs:label": "stageAsNumber" - }, - { - "@id": "schema:subStageSuffix", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalConditionStage", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The substage, e.g. 'a' for Stage IIIa.", - "rdfs:label": "subStageSuffix" - } - ] - }, - { - "@id": "schema:MedicalEnumeration", - "@type": "rdfs:Class", - "rdfs:comment": "Enumerations related to health and the practice of medicine.", - "rdfs:label": "MedicalEnumeration", - "rdfs:subClassOf": [ - "schema:MedicalIntangible", - "schema:Enumeration" - ], - "children": [ - { - "@id": "schema:DrugCostCategory", - "@type": "rdfs:Class", - "rdfs:comment": "Enumerated categories of medical drug costs.", - "rdfs:label": "DrugCostCategory", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:DrugPregnancyCategory", - "@type": "rdfs:Class", - "rdfs:comment": "Categories that represent an assessment of the risk of fetal injury due to a drug or pharmaceutical used as directed by the mother during pregnancy.", - "rdfs:label": "DrugPregnancyCategory", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:DrugPrescriptionStatus", - "@type": "rdfs:Class", - "rdfs:comment": "Indicates whether this drug is available by prescription or 
over-the-counter.", - "rdfs:label": "DrugPrescriptionStatus", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:InfectiousAgentClass", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Classes of agents or pathogens that transmit infectious diseases. Enumerated type.", - "rdfs:label": "InfectiousAgentClass", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:MedicalAudience", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Target audiences for medical web pages. Enumerated type.", - "rdfs:label": "MedicalAudience", - "rdfs:subClassOf": [ - "schema:Audience", - "schema:MedicalEnumeration", - "schema:Enumeration", - "schema:PeopleAudience" - ] - }, - { - "@id": "schema:MedicalDevicePurpose", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Categories of medical devices, organized by the purpose or intended use of the device.", - "rdfs:label": "MedicalDevicePurpose", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:MedicalEvidenceLevel", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Level of evidence for a medical guideline. 
Enumerated type.", - "rdfs:label": "MedicalEvidenceLevel", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:MedicalImagingTechnique", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Any medical imaging modality typically used for diagnostic purposes. Enumerated type.", - "rdfs:label": "MedicalImagingTechnique", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:MedicalObservationalStudyDesign", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Design models for observational medical studies. Enumerated type.", - "rdfs:label": "MedicalObservationalStudyDesign", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:MedicalProcedureType", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "An enumeration that describes different types of medical procedures.", - "rdfs:label": "MedicalProcedureType", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:MedicalSpecialty", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Any specific branch of medical science or practice. Medical specialities include clinical specialties that pertain to particular organ systems and their respective disease states, as well as allied health specialties. 
Enumerated type.", - "rdfs:label": "MedicalSpecialty", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration", - "schema:Specialty" - ] - }, - { - "@id": "schema:MedicalStudyStatus", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "The status of a medical study. Enumerated type.", - "rdfs:label": "MedicalStudyStatus", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:MedicalTrialDesign", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Design models for medical trials. Enumerated type.", - "rdfs:label": "MedicalTrialDesign", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:MedicineSystem", - "@type": "rdfs:Class", - "rdfs:comment": "Systems of medical practice.", - "rdfs:label": "MedicineSystem", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:PhysicalActivityCategory", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Categories of physical activity, organized by physiologic classification.", - "rdfs:label": "PhysicalActivityCategory", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - }, - { - "@id": "schema:PhysicalExam", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A type of physical examination of a patient performed by a physician. 
Enumerated type.", - "rdfs:label": "PhysicalExam", - "rdfs:subClassOf": [ - "schema:MedicalEnumeration", - "schema:Enumeration" - ] - } - ] - } - ] - }, - { - "@id": "schema:MedicalProcedure", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A process of care used in either a diagnostic, therapeutic, or palliative capacity that relies on invasive (surgical), non-invasive, or percutaneous techniques.", - "rdfs:label": "MedicalProcedure", - "rdfs:subClassOf": "schema:MedicalEntity", - "children": [ - { - "@id": "schema:DiagnosticProcedure", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A medical procedure intended primarily for diagnostic, as opposed to therapeutic, purposes.", - "rdfs:label": "DiagnosticProcedure", - "rdfs:subClassOf": [ - "schema:MedicalProcedure", - "schema:MedicalTest" - ] - }, - { - "@id": "schema:PalliativeProcedure", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A medical procedure intended primarily for palliative purposes, aimed at relieving the symptoms of an underlying health condition.", - "rdfs:label": "PalliativeProcedure", - "rdfs:subClassOf": [ - "schema:MedicalProcedure", - "schema:MedicalTherapy" - ] - }, - { - "@id": "schema:TherapeuticProcedure", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A medical procedure intended primarily for therapeutic purposes, aimed at improving a health condition.", - "rdfs:label": "TherapeuticProcedure", - "rdfs:subClassOf": [ - "schema:MedicalProcedure", - "schema:MedicalTherapy" - ] - } - ], - "properties": [ - { - "@id": 
"schema:followup", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalProcedure", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Typical or recommended followup care after the procedure is performed.", - "rdfs:label": "followup" - }, - { - "@id": "schema:howPerformed", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalProcedure", - "rangeIncludes": "schema:Text", - "rdfs:comment": "How the procedure is performed.", - "rdfs:label": "howPerformed" - }, - { - "@id": "schema:preparation", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalProcedure", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Typical preparation that a patient must undergo before having the procedure performed.", - "rdfs:label": "preparation" - }, - { - "@id": "schema:procedureType", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalProcedure", - "rangeIncludes": "schema:MedicalProcedureType", - "rdfs:comment": "The type of procedure, for example Surgical, Noninvasive, or Percutaneous.", - "rdfs:label": "procedureType" - } - ] - }, - { - "@id": "schema:MedicalRiskEstimator", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Any rule set or interactive tool for estimating the risk of developing a complication or condition.", - "rdfs:label": "MedicalRiskEstimator", - "rdfs:subClassOf": "schema:MedicalEntity", - "children": [ - { - "@id": "schema:MedicalRiskCalculator", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A complex mathematical calculation requiring an online calculator, used to assess prognosis. 
Note: use the url property of Thing to record any URLs for online calculators.", - "rdfs:label": "MedicalRiskCalculator", - "rdfs:subClassOf": "schema:MedicalRiskEstimator" - }, - { - "@id": "schema:MedicalRiskScore", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A simple system that adds up the number of risk factors to yield a score that is associated with prognosis, e.g. CHAD score, TIMI risk score.", - "rdfs:label": "MedicalRiskScore", - "rdfs:subClassOf": "schema:MedicalRiskEstimator", - "properties": [ - { - "@id": "schema:algorithm", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalRiskScore", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The algorithm or rules to follow to compute the score.", - "rdfs:label": "algorithm" - } - ] - } - ], - "properties": [ - { - "@id": "schema:estimatesRiskOf", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalRiskEstimator", - "rangeIncludes": "schema:MedicalEntity", - "rdfs:comment": "The condition, complication, or symptom whose risk is being estimated.", - "rdfs:label": "estimatesRiskOf" - }, - { - "@id": "schema:includedRiskFactor", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalRiskEstimator", - "rangeIncludes": "schema:MedicalRiskFactor", - "rdfs:comment": "A modifiable or non-modifiable risk factor included in the calculation, e.g. 
age, coexisting condition.", - "rdfs:label": "includedRiskFactor" - } - ] - }, - { - "@id": "schema:MedicalRiskFactor", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A risk factor is anything that increases a person's likelihood of developing or contracting a disease, medical condition, or complication.", - "rdfs:label": "MedicalRiskFactor", - "rdfs:subClassOf": "schema:MedicalEntity", - "properties": [ - { - "@id": "schema:increasesRiskOf", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalRiskFactor", - "rangeIncludes": "schema:MedicalEntity", - "rdfs:comment": "The condition, complication, etc. influenced by this factor.", - "rdfs:label": "increasesRiskOf" - } - ] - }, - { - "@id": "schema:MedicalSignOrSymptom", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Any indication of the existence of a medical condition or disease.", - "rdfs:label": "MedicalSignOrSymptom", - "rdfs:subClassOf": "schema:MedicalEntity", - "children": [ - { - "@id": "schema:MedicalSign", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Any physical manifestation of a person's medical condition discoverable by objective diagnostic tests or physical examination.", - "rdfs:label": "MedicalSign", - "rdfs:subClassOf": "schema:MedicalSignOrSymptom", - "properties": [ - { - "@id": "schema:identifyingExam", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalSign", - "rangeIncludes": "schema:PhysicalExam", - "rdfs:comment": "A physical examination that can identify this sign.", - "rdfs:label": "identifyingExam" - }, - { - "@id": "schema:identifyingTest", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalSign", 
- "rangeIncludes": "schema:MedicalTest", - "rdfs:comment": "A diagnostic test that can identify this sign.", - "rdfs:label": "identifyingTest" - } - ] - }, - { - "@id": "schema:MedicalSymptom", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Any indication of the existence of a medical condition or disease that is apparent to the patient.", - "rdfs:label": "MedicalSymptom", - "rdfs:subClassOf": "schema:MedicalSignOrSymptom" - } - ], - "properties": [ - { - "@id": "schema:cause", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MedicalCondition", - "schema:MedicalSignOrSymptom" - ], - "rangeIncludes": "schema:MedicalCause", - "rdfs:comment": "An underlying cause. More specifically, one of the causative agent(s) that are most directly responsible for the pathophysiologic process that eventually results in the occurrence.", - "rdfs:label": "cause" - }, - { - "@id": "schema:possibleTreatment", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MedicalCondition", - "schema:MedicalSignOrSymptom" - ], - "rangeIncludes": "schema:MedicalTherapy", - "rdfs:comment": "A possible treatment to address this condition, sign or symptom.", - "rdfs:label": "possibleTreatment" - } - ] - }, - { - "@id": "schema:MedicalStudy", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A medical study is an umbrella type covering all kinds of research studies relating to human medicine or health, including observational studies and interventional trials and registries, randomized, controlled or not. When the specific type of study is known, use one of the extensions of this type, such as MedicalTrial or MedicalObservationalStudy. 
Also, note that this type should be used to mark up data that describes the study itself; to tag an article that publishes the results of a study, use MedicalScholarlyArticle. Note: use the code property of MedicalEntity to store study IDs, e.g. clinicaltrials.gov ID.", - "rdfs:label": "MedicalStudy", - "rdfs:subClassOf": "schema:MedicalEntity", - "children": [ - { - "@id": "schema:MedicalObservationalStudy", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "An observational study is a type of medical study that attempts to infer the possible effect of a treatment through observation of a cohort of subjects over a period of time. In an observational study, the assignment of subjects into treatment groups versus control groups is outside the control of the investigator. This is in contrast with controlled studies, such as the randomized controlled trials represented by MedicalTrial, where each subject is randomly assigned to a treatment group or a control group before the start of the treatment.", - "rdfs:label": "MedicalObservationalStudy", - "rdfs:subClassOf": "schema:MedicalStudy", - "properties": [ - { - "@id": "schema:studyDesign", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalObservationalStudy", - "rangeIncludes": "schema:MedicalObservationalStudyDesign", - "rdfs:comment": "Specifics about the observational study design (enumerated).", - "rdfs:label": "studyDesign" - } - ] - }, - { - "@id": "schema:MedicalTrial", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A medical trial is a type of medical study that uses scientific process used to compare the safety and efficacy of medical therapies or medical procedures. 
In general, medical trials are controlled and subjects are allocated at random to the different treatment and/or control groups.", - "rdfs:label": "MedicalTrial", - "rdfs:subClassOf": "schema:MedicalStudy", - "properties": [ - { - "@id": "schema:phase", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalTrial", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The phase of the trial.", - "rdfs:label": "phase" - }, - { - "@id": "schema:trialDesign", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalTrial", - "rangeIncludes": "schema:MedicalTrialDesign", - "rdfs:comment": "Specifics about the trial design (enumerated).", - "rdfs:label": "trialDesign" - } - ] - } - ], - "properties": [ - { - "@id": "schema:outcome", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalStudy", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Expected or actual outcomes of the study.", - "rdfs:label": "outcome" - }, - { - "@id": "schema:population", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalStudy", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Any characteristics of the population used in the study, e.g. 
'males under 65'.", - "rdfs:label": "population" - }, - { - "@id": "schema:sponsor", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalStudy", - "rangeIncludes": "schema:Organization", - "rdfs:comment": "Sponsor of the study.", - "rdfs:label": "sponsor" - }, - { - "@id": "schema:status", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalStudy", - "rangeIncludes": "schema:MedicalStudyStatus", - "rdfs:comment": "The status of the study (enumerated).", - "rdfs:label": "status" - }, - { - "@id": "schema:studyLocation", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalStudy", - "rangeIncludes": "schema:AdministrativeArea", - "rdfs:comment": "The location in which the study is taking/took place.", - "rdfs:label": "studyLocation" - }, - { - "@id": "schema:studySubject", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalStudy", - "rangeIncludes": "schema:MedicalEntity", - "rdfs:comment": "A subject of the study, i.e. one of the medical conditions, therapies, devices, drugs, etc. 
investigated by the study.", - "rdfs:label": "studySubject" - } - ] - }, - { - "@id": "schema:MedicalTest", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Any medical test, typically performed for diagnostic purposes.", - "rdfs:label": "MedicalTest", - "rdfs:subClassOf": "schema:MedicalEntity", - "children": [ - { - "@id": "schema:BloodTest", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A medical test performed on a sample of a patient's blood.", - "rdfs:label": "BloodTest", - "rdfs:subClassOf": "schema:MedicalTest" - }, - { - "@id": "schema:DiagnosticProcedure", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A medical procedure intended primarily for diagnostic, as opposed to therapeutic, purposes.", - "rdfs:label": "DiagnosticProcedure", - "rdfs:subClassOf": [ - "schema:MedicalProcedure", - "schema:MedicalTest" - ] - }, - { - "@id": "schema:ImagingTest", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Any medical imaging modality typically used for diagnostic purposes.", - "rdfs:label": "ImagingTest", - "rdfs:subClassOf": "schema:MedicalTest", - "properties": [ - { - "@id": "schema:imagingTechnique", - "@type": "rdf:Property", - "domainIncludes": "schema:ImagingTest", - "rangeIncludes": "schema:MedicalImagingTechnique", - "rdfs:comment": "Imaging technique used.", - "rdfs:label": "imagingTechnique" - } - ] - }, - { - "@id": "schema:MedicalTestPanel", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": 
"http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Any collection of tests commonly ordered together.", - "rdfs:label": "MedicalTestPanel", - "rdfs:subClassOf": "schema:MedicalTest", - "properties": [ - { - "@id": "schema:subTest", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalTestPanel", - "rangeIncludes": "schema:MedicalTest", - "rdfs:comment": "A component test of the panel.", - "rdfs:label": "subTest" - } - ] - }, - { - "@id": "schema:PathologyTest", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A medical test performed by a laboratory that typically involves examination of a tissue sample by a pathologist.", - "rdfs:label": "PathologyTest", - "rdfs:subClassOf": "schema:MedicalTest", - "properties": [ - { - "@id": "schema:tissueSample", - "@type": "rdf:Property", - "domainIncludes": "schema:PathologyTest", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The type of tissue sample required for the test.", - "rdfs:label": "tissueSample" - } - ] - } - ], - "properties": [ - { - "@id": "schema:affectedBy", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalTest", - "rangeIncludes": "schema:Drug", - "rdfs:comment": "Drugs that affect the test's results.", - "rdfs:label": "affectedBy" - }, - { - "@id": "schema:normalRange", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalTest", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Range of acceptable values for a typical patient, when applicable.", - "rdfs:label": "normalRange" - }, - { - "@id": "schema:signDetected", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalTest", - "rangeIncludes": "schema:MedicalSign", - "rdfs:comment": "A sign detected by the test.", - "rdfs:label": "signDetected" - }, - { - "@id": "schema:usedToDiagnose", - "@type": "rdf:Property", - "domainIncludes": 
"schema:MedicalTest", - "rangeIncludes": "schema:MedicalCondition", - "rdfs:comment": "A condition the test is used to diagnose.", - "rdfs:label": "usedToDiagnose" - }, - { - "@id": "schema:usesDevice", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalTest", - "rangeIncludes": "schema:MedicalDevice", - "rdfs:comment": "Device used to perform the test.", - "rdfs:label": "usesDevice" - } - ] - }, - { - "@id": "schema:MedicalTherapy", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Any medical intervention designed to prevent, treat, and cure human diseases and medical conditions, including both curative and palliative therapies. Medical therapies are typically processes of care relying upon pharmacotherapy, behavioral therapy, supportive therapy (with fluid or nutrition for example), or detoxification (e.g. hemodialysis) aimed at improving or preventing a health condition.", - "rdfs:label": "MedicalTherapy", - "rdfs:subClassOf": "schema:MedicalEntity", - "children": [ - { - "@id": "schema:DietarySupplement", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A product taken by mouth that contains a dietary ingredient intended to supplement the diet. 
Dietary ingredients may include vitamins, minerals, herbs or other botanicals, amino acids, and substances such as enzymes, organ tissues, glandulars and metabolites.", - "rdfs:label": "DietarySupplement", - "rdfs:subClassOf": "schema:MedicalTherapy", - "properties": [ - { - "@id": "schema:activeIngredient", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:DietarySupplement", - "schema:Drug", - "schema:DrugStrength" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "An active ingredient, typically chemical compounds and/or biologic substances.", - "rdfs:label": "activeIngredient" - }, - { - "@id": "schema:background", - "@type": "rdf:Property", - "domainIncludes": "schema:DietarySupplement", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Descriptive information establishing a historical perspective on the supplement. May include the rationale for the name, the population where the supplement first came to prominence, etc.", - "rdfs:label": "background" - }, - { - "@id": "schema:dosageForm", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:DietarySupplement", - "schema:Drug" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "A dosage form in which this drug/supplement is available, e.g. 'tablet', 'suspension', 'injection'.", - "rdfs:label": "dosageForm" - }, - { - "@id": "schema:isProprietary", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:DietarySupplement", - "schema:Drug" - ], - "rangeIncludes": "schema:Boolean", - "rdfs:comment": "True if this item's name is a proprietary/brand name (vs. 
generic name).", - "rdfs:label": "isProprietary" - }, - { - "@id": "schema:legalStatus", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:DietarySupplement", - "schema:Drug" - ], - "rangeIncludes": "schema:DrugLegalStatus", - "rdfs:comment": "The drug or supplement's legal status, including any controlled substance schedules that apply.", - "rdfs:label": "legalStatus" - }, - { - "@id": "schema:manufacturer", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:DietarySupplement", - "schema:Drug", - "schema:Product" - ], - "rangeIncludes": "schema:Organization", - "rdfs:comment": "The manufacturer of the product.", - "rdfs:label": "manufacturer" - }, - { - "@id": "schema:maximumIntake", - "@type": "rdf:Property", - "domainIncludes": "schema:DietarySupplement", - "rangeIncludes": "schema:MaximumDoseSchedule", - "rdfs:comment": "Recommended intake of this supplement for a given population as defined by a specific recommending authority.", - "rdfs:label": "maximumIntake" - }, - { - "@id": "schema:mechanismOfAction", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:DietarySupplement", - "schema:Drug" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The specific biochemical interaction through which this drug or supplement produces its pharmacological effect.", - "rdfs:label": "mechanismOfAction" - }, - { - "@id": "schema:nonProprietaryName", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:DietarySupplement", - "schema:Drug" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The generic name of this drug or supplement.", - "rdfs:label": "nonProprietaryName" - }, - { - "@id": "schema:recommendedIntake", - "@type": "rdf:Property", - "domainIncludes": "schema:DietarySupplement", - "rangeIncludes": "schema:RecommendedDoseSchedule", - "rdfs:comment": "Recommended intake of this supplement for a given population as defined by a specific recommending authority.", - "rdfs:label": "recommendedIntake" - }, - { - "@id": 
"schema:safetyConsideration", - "@type": "rdf:Property", - "domainIncludes": "schema:DietarySupplement", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Any potential safety concern associated with the supplement. May include interactions with other drugs and foods, pregnancy, breastfeeding, known adverse reactions, and documented efficacy of the supplement.", - "rdfs:label": "safetyConsideration" - }, - { - "@id": "schema:targetPopulation", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:DietarySupplement", - "schema:DoseSchedule" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "Characteristics of the population for which this is intended, or which typically uses it, e.g. 'adults'.", - "rdfs:label": "targetPopulation" - } - ] - }, - { - "@id": "schema:Drug", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A chemical or biologic substance, used as a medical therapy, that has a physiological effect on an organism.", - "rdfs:label": "Drug", - "rdfs:subClassOf": "schema:MedicalTherapy", - "properties": [ - { - "@id": "schema:activeIngredient", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:DietarySupplement", - "schema:Drug", - "schema:DrugStrength" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "An active ingredient, typically chemical compounds and/or biologic substances.", - "rdfs:label": "activeIngredient" - }, - { - "@id": "schema:administrationRoute", - "@type": "rdf:Property", - "domainIncludes": "schema:Drug", - "rangeIncludes": "schema:Text", - "rdfs:comment": "A route by which this drug may be administered, e.g. 'oral'.", - "rdfs:label": "administrationRoute" - }, - { - "@id": "schema:alcoholWarning", - "@type": "rdf:Property", - "domainIncludes": "schema:Drug", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Any precaution, guidance, contraindication, etc. 
related to consumption of alcohol while taking this drug.", - "rdfs:label": "alcoholWarning" - }, - { - "@id": "schema:availableStrength", - "@type": "rdf:Property", - "domainIncludes": "schema:Drug", - "rangeIncludes": "schema:DrugStrength", - "rdfs:comment": "An available dosage strength for the drug.", - "rdfs:label": "availableStrength" - }, - { - "@id": "schema:breastfeedingWarning", - "@type": "rdf:Property", - "domainIncludes": "schema:Drug", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Any precaution, guidance, contraindication, etc. related to this drug's use by breastfeeding mothers.", - "rdfs:label": "breastfeedingWarning" - }, - { - "@id": "schema:clincalPharmacology", - "@type": "rdf:Property", - "domainIncludes": "schema:Drug", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Description of the absorption and elimination of drugs, including their concentration (pharmacokinetics, pK) and biological effects (pharmacodynamics, pD).", - "rdfs:label": "clincalPharmacology" - }, - { - "@id": "schema:cost", - "@type": "rdf:Property", - "domainIncludes": "schema:Drug", - "rangeIncludes": "schema:DrugCost", - "rdfs:comment": "Cost per unit of the drug, as reported by the source being tagged.", - "rdfs:label": "cost" - }, - { - "@id": "schema:dosageForm", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:DietarySupplement", - "schema:Drug" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "A dosage form in which this drug/supplement is available, e.g. 
'tablet', 'suspension', 'injection'.", - "rdfs:label": "dosageForm" - }, - { - "@id": "schema:doseSchedule", - "@type": "rdf:Property", - "domainIncludes": "schema:Drug", - "rangeIncludes": "schema:DoseSchedule", - "rdfs:comment": "A dosing schedule for the drug for a given population, either observed, recommended, or maximum dose based on the type used.", - "rdfs:label": "doseSchedule" - }, - { - "@id": "schema:drugClass", - "@type": "rdf:Property", - "domainIncludes": "schema:Drug", - "rangeIncludes": "schema:DrugClass", - "rdfs:comment": "The class of drug this belongs to (e.g., statins).", - "rdfs:label": "drugClass" - }, - { - "@id": "schema:foodWarning", - "@type": "rdf:Property", - "domainIncludes": "schema:Drug", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Any precaution, guidance, contraindication, etc. related to consumption of specific foods while taking this drug.", - "rdfs:label": "foodWarning" - }, - { - "@id": "schema:interactingDrug", - "@type": "rdf:Property", - "domainIncludes": "schema:Drug", - "rangeIncludes": "schema:Drug", - "rdfs:comment": "Another drug that is known to interact with this drug in a way that impacts the effect of this drug or causes a risk to the patient. Note: disease interactions are typically captured as contraindications.", - "rdfs:label": "interactingDrug" - }, - { - "@id": "schema:isAvailableGenerically", - "@type": "rdf:Property", - "domainIncludes": "schema:Drug", - "rangeIncludes": "schema:Boolean", - "rdfs:comment": "True if the drug is available in a generic form (regardless of name).", - "rdfs:label": "isAvailableGenerically" - }, - { - "@id": "schema:isProprietary", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:DietarySupplement", - "schema:Drug" - ], - "rangeIncludes": "schema:Boolean", - "rdfs:comment": "True if this item's name is a proprietary/brand name (vs. 
generic name).", - "rdfs:label": "isProprietary" - }, - { - "@id": "schema:labelDetails", - "@type": "rdf:Property", - "domainIncludes": "schema:Drug", - "rangeIncludes": "schema:URL", - "rdfs:comment": "Link to the drug's label details.", - "rdfs:label": "labelDetails" - }, - { - "@id": "schema:legalStatus", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:DietarySupplement", - "schema:Drug" - ], - "rangeIncludes": "schema:DrugLegalStatus", - "rdfs:comment": "The drug or supplement's legal status, including any controlled substance schedules that apply.", - "rdfs:label": "legalStatus" - }, - { - "@id": "schema:manufacturer", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:DietarySupplement", - "schema:Drug", - "schema:Product" - ], - "rangeIncludes": "schema:Organization", - "rdfs:comment": "The manufacturer of the product.", - "rdfs:label": "manufacturer" - }, - { - "@id": "schema:mechanismOfAction", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:DietarySupplement", - "schema:Drug" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The specific biochemical interaction through which this drug or supplement produces its pharmacological effect.", - "rdfs:label": "mechanismOfAction" - }, - { - "@id": "schema:nonProprietaryName", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:DietarySupplement", - "schema:Drug" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The generic name of this drug or supplement.", - "rdfs:label": "nonProprietaryName" - }, - { - "@id": "schema:overdosage", - "@type": "rdf:Property", - "domainIncludes": "schema:Drug", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Any information related to overdose on a drug, including signs or symptoms, treatments, contact information for emergency response.", - "rdfs:label": "overdosage" - }, - { - "@id": "schema:pregnancyCategory", - "@type": "rdf:Property", - "domainIncludes": "schema:Drug", - "rangeIncludes": "schema:DrugPregnancyCategory", 
- "rdfs:comment": "Pregnancy category of this drug.", - "rdfs:label": "pregnancyCategory" - }, - { - "@id": "schema:pregnancyWarning", - "@type": "rdf:Property", - "domainIncludes": "schema:Drug", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Any precaution, guidance, contraindication, etc. related to this drug's use during pregnancy.", - "rdfs:label": "pregnancyWarning" - }, - { - "@id": "schema:prescribingInfo", - "@type": "rdf:Property", - "domainIncludes": "schema:Drug", - "rangeIncludes": "schema:URL", - "rdfs:comment": "Link to prescribing information for the drug.", - "rdfs:label": "prescribingInfo" - }, - { - "@id": "schema:prescriptionStatus", - "@type": "rdf:Property", - "domainIncludes": "schema:Drug", - "rangeIncludes": "schema:DrugPrescriptionStatus", - "rdfs:comment": "Indicates whether this drug is available by prescription or over-the-counter.", - "rdfs:label": "prescriptionStatus" - }, - { - "@id": "schema:relatedDrug", - "@type": "rdf:Property", - "domainIncludes": "schema:Drug", - "rangeIncludes": "schema:Drug", - "rdfs:comment": "Any other drug related to this one, for example commonly-prescribed alternatives.", - "rdfs:label": "relatedDrug" - }, - { - "@id": "schema:warning", - "@type": "rdf:Property", - "domainIncludes": "schema:Drug", - "rangeIncludes": [ - "schema:Text", - "schema:URL" - ], - "rdfs:comment": "Any FDA or other warnings about the drug (text or URL).", - "rdfs:label": "warning" - } - ] - }, - { - "@id": "schema:DrugClass", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A class of medical drugs, e.g., statins. 
Classes can represent general pharmacological class, common mechanisms of action, common physiological effects, etc.", - "rdfs:label": "DrugClass", - "rdfs:subClassOf": "schema:MedicalTherapy", - "properties": [ - { - "@id": "schema:drug", - "@type": "rdf:Property", - "domainIncludes": "schema:DrugClass", - "rangeIncludes": "schema:Drug", - "rdfs:comment": "A drug in this drug class.", - "rdfs:label": "drug" - } - ] - }, - { - "@id": "schema:LifestyleModification", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A process of care involving exercise, changes to diet, fitness routines, and other lifestyle changes aimed at improving a health condition.", - "rdfs:label": "LifestyleModification", - "rdfs:subClassOf": "schema:MedicalTherapy", - "children": [ - { - "@id": "schema:Diet", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A strategy of regulating the intake of food to achieve or maintain a specific health-related goal.", - "rdfs:label": "Diet", - "rdfs:subClassOf": [ - "schema:CreativeWork", - "schema:LifestyleModification" - ], - "properties": [ - { - "@id": "schema:dietFeatures", - "@type": "rdf:Property", - "domainIncludes": "schema:Diet", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Nutritional information specific to the dietary plan. 
May include dietary recommendations on what foods to avoid, what foods to consume, and specific alterations/deviations from the USDA or other regulatory body's approved dietary guidelines.", - "rdfs:label": "dietFeatures" - }, - { - "@id": "schema:endorsers", - "@type": "rdf:Property", - "domainIncludes": "schema:Diet", - "rangeIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "People or organizations that endorse the plan.", - "rdfs:label": "endorsers" - }, - { - "@id": "schema:expertConsiderations", - "@type": "rdf:Property", - "domainIncludes": "schema:Diet", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Medical expert advice related to the plan.", - "rdfs:label": "expertConsiderations" - }, - { - "@id": "schema:overview", - "@type": "rdf:Property", - "domainIncludes": "schema:Diet", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Descriptive information establishing the overarching theory/philosophy of the plan. May include the rationale for the name, the population where the plan first came to prominence, etc.", - "rdfs:label": "overview" - }, - { - "@id": "schema:physiologicalBenefits", - "@type": "rdf:Property", - "domainIncludes": "schema:Diet", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Specific physiologic benefits associated to the plan.", - "rdfs:label": "physiologicalBenefits" - }, - { - "@id": "schema:proprietaryName", - "@type": "rdf:Property", - "domainIncludes": "schema:Diet", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Proprietary name given to the diet plan, typically by its originator or creator.", - "rdfs:label": "proprietaryName" - }, - { - "@id": "schema:risks", - "@type": "rdf:Property", - "domainIncludes": "schema:Diet", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Specific physiologic risks associated to the plan.", - "rdfs:label": "risks" - } - ] - }, - { - "@id": "schema:PhysicalActivity", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": 
"http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Any bodily activity that enhances or maintains physical fitness and overall health and wellness. Includes activity that is part of daily living and routine, structured exercise, and exercise prescribed as part of a medical treatment or recovery plan.", - "rdfs:label": "PhysicalActivity", - "rdfs:subClassOf": "schema:LifestyleModification", - "children": [ - { - "@id": "schema:ExercisePlan", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Fitness-related activity designed for a specific health-related purpose, including defined exercise routines as well as activity prescribed by a clinician.", - "rdfs:label": "ExercisePlan", - "rdfs:subClassOf": [ - "schema:CreativeWork", - "schema:PhysicalActivity" - ], - "properties": [ - { - "@id": "schema:activityDuration", - "@type": "rdf:Property", - "domainIncludes": "schema:ExercisePlan", - "rangeIncludes": "schema:Duration", - "rdfs:comment": "Length of time to engage in the activity.", - "rdfs:label": "activityDuration" - }, - { - "@id": "schema:activityFrequency", - "@type": "rdf:Property", - "domainIncludes": "schema:ExercisePlan", - "rangeIncludes": "schema:Text", - "rdfs:comment": "How often one should engage in the activity.", - "rdfs:label": "activityFrequency" - }, - { - "@id": "schema:additionalVariable", - "@type": "rdf:Property", - "domainIncludes": "schema:ExercisePlan", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Any additional component of the exercise prescription that may need to be articulated to the patient. 
This may include the order of exercises, the number of repetitions of movement, quantitative distance, progressions over time, etc.", - "rdfs:label": "additionalVariable" - }, - { - "@id": "schema:exerciseType", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:ExercisePlan", - "schema:ExerciseAction" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "Type(s) of exercise or activity, such as strength training, flexibility training, aerobics, cardiac rehabilitation, etc.", - "rdfs:label": "exerciseType" - }, - { - "@id": "schema:intensity", - "@type": "rdf:Property", - "domainIncludes": "schema:ExercisePlan", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Quantitative measure gauging the degree of force involved in the exercise, for example, heartbeats per minute. May include the velocity of the movement.", - "rdfs:label": "intensity" - }, - { - "@id": "schema:repetitions", - "@type": "rdf:Property", - "domainIncludes": "schema:ExercisePlan", - "rangeIncludes": "schema:Number", - "rdfs:comment": "Number of times one should repeat the activity.", - "rdfs:label": "repetitions" - }, - { - "@id": "schema:restPeriods", - "@type": "rdf:Property", - "domainIncludes": "schema:ExercisePlan", - "rangeIncludes": "schema:Text", - "rdfs:comment": "How often one should break from the activity.", - "rdfs:label": "restPeriods" - }, - { - "@id": "schema:workload", - "@type": "rdf:Property", - "domainIncludes": "schema:ExercisePlan", - "rangeIncludes": "schema:Energy", - "rdfs:comment": "Quantitative measure of the physiologic output of the exercise; also referred to as energy expenditure.", - "rdfs:label": "workload" - } - ] - } - ], - "properties": [ - { - "@id": "schema:associatedAnatomy", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:PhysicalActivity", - "schema:MedicalCondition" - ], - "rangeIncludes": [ - "schema:AnatomicalStructure", - "schema:AnatomicalSystem", - "schema:SuperficialAnatomy" - ], - "rdfs:comment": "The anatomy of the 
underlying organ system or structures associated with this entity.", - "rdfs:label": "associatedAnatomy" - }, - { - "@id": "schema:category", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:PhysicalActivity" - ], - "rangeIncludes": [ - "schema:PhysicalActivityCategory", - "schema:Text", - "schema:Thing" - ], - "rdfs:comment": "A category for the item. Greater signs or slashes can be used to informally indicate a category hierarchy.", - "rdfs:label": "category" - }, - { - "@id": "schema:epidemiology", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:PhysicalActivity", - "schema:MedicalCondition" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The characteristics of associated patients, such as age, gender, race etc.", - "rdfs:label": "epidemiology" - }, - { - "@id": "schema:pathophysiology", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:PhysicalActivity", - "schema:MedicalCondition" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "Changes in the normal mechanical, physical, and biochemical functions that are associated with this activity or condition.", - "rdfs:label": "pathophysiology" - } - ] - } - ] - }, - { - "@id": "schema:PalliativeProcedure", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A medical procedure intended primarily for palliative purposes, aimed at relieving the symptoms of an underlying health condition.", - "rdfs:label": "PalliativeProcedure", - "rdfs:subClassOf": [ - "schema:MedicalProcedure", - "schema:MedicalTherapy" - ] - }, - { - "@id": "schema:PhysicalTherapy", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A process of progressive physical care and rehabilitation aimed at improving a health condition.", - "rdfs:label": 
"PhysicalTherapy", - "rdfs:subClassOf": "schema:MedicalTherapy" - }, - { - "@id": "schema:PsychologicalTreatment", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A process of care relying upon counseling, dialogue, communication, verbalization aimed at improving a mental health condition.", - "rdfs:label": "PsychologicalTreatment", - "rdfs:subClassOf": "schema:MedicalTherapy" - }, - { - "@id": "schema:RadiationTherapy", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A process of care using radiation aimed at improving a health condition.", - "rdfs:label": "RadiationTherapy", - "rdfs:subClassOf": "schema:MedicalTherapy" - }, - { - "@id": "schema:TherapeuticProcedure", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "A medical procedure intended primarily for therapeutic purposes, aimed at improving a health condition.", - "rdfs:label": "TherapeuticProcedure", - "rdfs:subClassOf": [ - "schema:MedicalProcedure", - "schema:MedicalTherapy" - ] - } - ], - "properties": [ - { - "@id": "schema:adverseOutcome", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MedicalTherapy", - "schema:MedicalDevice" - ], - "rangeIncludes": "schema:MedicalEntity", - "rdfs:comment": "A possible complication and/or side effect of this therapy. 
If it is known that an adverse outcome is serious (resulting in death, disability, or permanent damage; requiring hospitalization; or is otherwise life-threatening or requires immediate medical attention), tag it as a seriouseAdverseOutcome instead.", - "rdfs:label": "adverseOutcome" - }, - { - "@id": "schema:contraindication", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MedicalTherapy", - "schema:MedicalDevice" - ], - "rangeIncludes": "schema:MedicalContraindication", - "rdfs:comment": "A contraindication for this therapy.", - "rdfs:label": "contraindication" - }, - { - "@id": "schema:duplicateTherapy", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalTherapy", - "rangeIncludes": "schema:MedicalTherapy", - "rdfs:comment": "A therapy that duplicates or overlaps this one.", - "rdfs:label": "duplicateTherapy" - }, - { - "@id": "schema:indication", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MedicalTherapy", - "schema:MedicalDevice" - ], - "rangeIncludes": "schema:MedicalIndication", - "rdfs:comment": "A factor that indicates use of this therapy for treatment and/or prevention of a condition, symptom, etc. For therapies such as drugs, indications can include both officially-approved indications as well as off-label uses. These can be distinguished by using the ApprovedIndication subtype of MedicalIndication.", - "rdfs:label": "indication" - }, - { - "@id": "schema:seriousAdverseOutcome", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MedicalTherapy", - "schema:MedicalDevice" - ], - "rangeIncludes": "schema:MedicalEntity", - "rdfs:comment": "A possible serious complication and/or serious side effect of this therapy. 
Serious adverse outcomes include those that are life-threatening; result in death, disability, or permanent damage; require hospitalization or prolong existing hospitalization; cause congenital anomalies or birth defects; or jeopardize the patient and may require medical or surgical intervention to prevent one of the outcomes in this definition.", - "rdfs:label": "seriousAdverseOutcome" - } - ] - }, - { - "@id": "schema:SuperficialAnatomy", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_WikiDoc" - }, - "rdfs:comment": "Anatomical features that can be observed by sight (without dissection), including the form and proportions of the human body as well as surface landmarks that correspond to deeper subcutaneous structures. Superficial anatomy plays an important role in sports medicine, phlebotomy, and other medical specialties as underlying anatomical structures can be identified through surface palpation. For example, during back surgery, superficial anatomy can be used to palpate and count vertebrae to find the site of incision. Or in phlebotomy, superficial anatomy can be used to locate an underlying vein; for example, the median cubital vein can be located by palpating the borders of the cubital fossa (such as the epicondyles of the humerus) and then looking for the superficial signs of the vein, such as size, prominence, ability to refill after depression, and feel of surrounding tissue support. As another example, in a subluxation (dislocation) of the glenohumeral joint, the bony structure becomes pronounced with the deltoid muscle failing to cover the glenohumeral joint allowing the edges of the scapula to be superficially visible. 
Here, the superficial anatomy is the visible edges of the scapula, implying the underlying dislocation of the joint (the related anatomical structure).", - "rdfs:label": "SuperficialAnatomy", - "rdfs:subClassOf": "schema:MedicalEntity", - "properties": [ - { - "@id": "schema:associatedPathophysiology", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:AnatomicalStructure", - "schema:AnatomicalSystem", - "schema:SuperficialAnatomy" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "If applicable, a description of the pathophysiology associated with the anatomical system, including potential abnormal changes in the mechanical, physical, and biochemical functions of the system.", - "rdfs:label": "associatedPathophysiology" - }, - { - "@id": "schema:relatedAnatomy", - "@type": "rdf:Property", - "domainIncludes": "schema:SuperficialAnatomy", - "rangeIncludes": [ - "schema:AnatomicalStructure", - "schema:AnatomicalSystem" - ], - "rdfs:comment": "Anatomical systems or structures that relate to the superficial anatomy.", - "rdfs:label": "relatedAnatomy" - }, - { - "@id": "schema:relatedCondition", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:AnatomicalStructure", - "schema:AnatomicalSystem", - "schema:SuperficialAnatomy" - ], - "rangeIncludes": "schema:MedicalCondition", - "rdfs:comment": "A medical condition associated with this anatomy.", - "rdfs:label": "relatedCondition" - }, - { - "@id": "schema:relatedTherapy", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:AnatomicalStructure", - "schema:AnatomicalSystem", - "schema:SuperficialAnatomy" - ], - "rangeIncludes": "schema:MedicalTherapy", - "rdfs:comment": "A medical therapy related to this anatomy.", - "rdfs:label": "relatedTherapy" - }, - { - "@id": "schema:significance", - "@type": "rdf:Property", - "domainIncludes": "schema:SuperficialAnatomy", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The significance associated with the superficial anatomy; as an example, how 
characteristics of the superficial anatomy can suggest underlying medical conditions or courses of treatment.", - "rdfs:label": "significance" - } - ] - } - ], - "properties": [ - { - "@id": "schema:alternateName", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Thing", - "schema:MedicalEntity" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "An alias for the item.", - "rdfs:label": "alternateName" - }, - { - "@id": "schema:code", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalEntity", - "rangeIncludes": "schema:MedicalCode", - "rdfs:comment": "A medical code for the entity, taken from a controlled vocabulary or ontology such as ICD-9, DiseasesDB, MeSH, SNOMED-CT, RxNorm, etc.", - "rdfs:label": "code" - }, - { - "@id": "schema:guideline", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalEntity", - "rangeIncludes": "schema:MedicalGuideline", - "rdfs:comment": "A medical guideline related to this entity.", - "rdfs:label": "guideline" - }, - { - "@id": "schema:medicineSystem", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalEntity", - "rangeIncludes": "schema:MedicineSystem", - "rdfs:comment": "The system of medicine that includes this MedicalEntity, for example 'evidence-based', 'homeopathic', 'chiropractic', etc.", - "rdfs:label": "medicineSystem" - }, - { - "@id": "schema:recognizingAuthority", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalEntity", - "rangeIncludes": "schema:Organization", - "rdfs:comment": "If applicable, the organization that officially recognizes this entity as part of its endorsed system of medicine.", - "rdfs:label": "recognizingAuthority" - }, - { - "@id": "schema:relevantSpecialty", - "@type": "rdf:Property", - "domainIncludes": "schema:MedicalEntity", - "rangeIncludes": "schema:MedicalSpecialty", - "rdfs:comment": "If applicable, a medical specialty in which this entity is relevant.", - "rdfs:label": "relevantSpecialty" - }, - { - "@id": "schema:study", - "@type": 
"rdf:Property", - "domainIncludes": "schema:MedicalEntity", - "rangeIncludes": "schema:MedicalStudy", - "rdfs:comment": "A medical study or trial related to this entity.", - "rdfs:label": "study" - } - ] - }, - { - "@id": "schema:Organization", - "@type": "rdfs:Class", - "rdfs:comment": "An organization such as a school, NGO, corporation, club, etc.", - "rdfs:label": "Organization", - "rdfs:subClassOf": "schema:Thing", - "children": [ - { - "@id": "schema:Airline", - "@type": "rdfs:Class", - "rdfs:comment": "An organization that provides flights for passengers.", - "rdfs:label": "Airline", - "rdfs:subClassOf": "schema:Organization", - "properties": [ - { - "@id": "schema:iataCode", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Airline", - "schema:Airport" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "IATA identifier for an airline or airport", - "rdfs:label": "iataCode" - } - ] - }, - { - "@id": "schema:Corporation", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_rNews" - }, - "rdfs:comment": "Organization: A business corporation.", - "rdfs:label": "Corporation", - "rdfs:subClassOf": "schema:Organization", - "properties": [ - { - "@id": "schema:tickerSymbol", - "@type": "rdf:Property", - "domainIncludes": "schema:Corporation", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The exchange traded instrument associated with a Corporation object. The tickerSymbol is expressed as an exchange and an instrument name separated by a space character. 
For the exchange component of the tickerSymbol attribute, we reccommend using the controlled vocaulary of Market Identifier Codes (MIC) specified in ISO15022.", - "rdfs:label": "tickerSymbol" - } - ] - }, - { - "@id": "schema:EducationalOrganization", - "@type": "rdfs:Class", - "rdfs:comment": "An educational organization.", - "rdfs:label": "EducationalOrganization", - "rdfs:subClassOf": "schema:Organization", - "children": [ - { - "@id": "schema:CollegeOrUniversity", - "@type": "rdfs:Class", - "rdfs:comment": "A college, university, or other third-level educational institution.", - "rdfs:label": "CollegeOrUniversity", - "rdfs:subClassOf": "schema:EducationalOrganization" - }, - { - "@id": "schema:ElementarySchool", - "@type": "rdfs:Class", - "rdfs:comment": "An elementary school.", - "rdfs:label": "ElementarySchool", - "rdfs:subClassOf": "schema:EducationalOrganization" - }, - { - "@id": "schema:HighSchool", - "@type": "rdfs:Class", - "rdfs:comment": "A high school.", - "rdfs:label": "HighSchool", - "rdfs:subClassOf": "schema:EducationalOrganization" - }, - { - "@id": "schema:MiddleSchool", - "@type": "rdfs:Class", - "rdfs:comment": "A middle school.", - "rdfs:label": "MiddleSchool", - "rdfs:subClassOf": "schema:EducationalOrganization" - }, - { - "@id": "schema:Preschool", - "@type": "rdfs:Class", - "rdfs:comment": "A preschool.", - "rdfs:label": "Preschool", - "rdfs:subClassOf": "schema:EducationalOrganization" - }, - { - "@id": "schema:School", - "@type": "rdfs:Class", - "rdfs:comment": "A school.", - "rdfs:label": "School", - "rdfs:subClassOf": "schema:EducationalOrganization" - } - ], - "properties": [ - { - "@id": "schema:alumni", - "@type": "rdf:Property", - "domainIncludes": "schema:EducationalOrganization", - "inverseOf": "schema:alumniOf", - "rangeIncludes": "schema:Person", - "rdfs:comment": "Alumni of educational organization.", - "rdfs:label": "alumni" - } - ] - }, - { - "@id": "schema:GovernmentOrganization", - "@type": "rdfs:Class", - 
"rdfs:comment": "A governmental organization or agency.", - "rdfs:label": "GovernmentOrganization", - "rdfs:subClassOf": "schema:Organization" - }, - { - "@id": "schema:LocalBusiness", - "@type": "rdfs:Class", - "rdfs:comment": "A particular physical business or branch of an organization. Examples of LocalBusiness include a restaurant, a particular branch of a restaurant chain, a branch of a bank, a medical practice, a club, a bowling alley, etc.", - "rdfs:label": "LocalBusiness", - "rdfs:subClassOf": [ - "schema:Organization", - "schema:Place" - ], - "children": [ - { - "@id": "schema:AnimalShelter", - "@type": "rdfs:Class", - "rdfs:comment": "Animal shelter.", - "rdfs:label": "AnimalShelter", - "rdfs:subClassOf": "schema:LocalBusiness" - }, - { - "@id": "schema:AutomotiveBusiness", - "@type": "rdfs:Class", - "rdfs:comment": "Car repair, sales, or parts.", - "rdfs:label": "AutomotiveBusiness", - "rdfs:subClassOf": "schema:LocalBusiness", - "children": [ - { - "@id": "schema:AutoBodyShop", - "@type": "rdfs:Class", - "rdfs:comment": "Auto body shop.", - "rdfs:label": "AutoBodyShop", - "rdfs:subClassOf": "schema:AutomotiveBusiness" - }, - { - "@id": "schema:AutoDealer", - "@type": "rdfs:Class", - "rdfs:comment": "An car dealership.", - "rdfs:label": "AutoDealer", - "rdfs:subClassOf": "schema:AutomotiveBusiness" - }, - { - "@id": "schema:AutoPartsStore", - "@type": "rdfs:Class", - "rdfs:comment": "An auto parts store.", - "rdfs:label": "AutoPartsStore", - "rdfs:subClassOf": [ - "schema:AutomotiveBusiness", - "schema:Store" - ] - }, - { - "@id": "schema:AutoRental", - "@type": "rdfs:Class", - "rdfs:comment": "A car rental business.", - "rdfs:label": "AutoRental", - "rdfs:subClassOf": "schema:AutomotiveBusiness" - }, - { - "@id": "schema:AutoRepair", - "@type": "rdfs:Class", - "rdfs:comment": "Car repair business.", - "rdfs:label": "AutoRepair", - "rdfs:subClassOf": "schema:AutomotiveBusiness" - }, - { - "@id": "schema:AutoWash", - "@type": "rdfs:Class", - 
"rdfs:comment": "A car wash business.", - "rdfs:label": "AutoWash", - "rdfs:subClassOf": "schema:AutomotiveBusiness" - }, - { - "@id": "schema:GasStation", - "@type": "rdfs:Class", - "rdfs:comment": "A gas station.", - "rdfs:label": "GasStation", - "rdfs:subClassOf": "schema:AutomotiveBusiness" - }, - { - "@id": "schema:MotorcycleDealer", - "@type": "rdfs:Class", - "rdfs:comment": "A motorcycle dealer.", - "rdfs:label": "MotorcycleDealer", - "rdfs:subClassOf": "schema:AutomotiveBusiness" - }, - { - "@id": "schema:MotorcycleRepair", - "@type": "rdfs:Class", - "rdfs:comment": "A motorcycle repair shop.", - "rdfs:label": "MotorcycleRepair", - "rdfs:subClassOf": "schema:AutomotiveBusiness" - } - ] - }, - { - "@id": "schema:ChildCare", - "@type": "rdfs:Class", - "rdfs:comment": "A Childcare center.", - "rdfs:label": "ChildCare", - "rdfs:subClassOf": "schema:LocalBusiness" - }, - { - "@id": "schema:DryCleaningOrLaundry", - "@type": "rdfs:Class", - "rdfs:comment": "A dry-cleaning business.", - "rdfs:label": "DryCleaningOrLaundry", - "rdfs:subClassOf": "schema:LocalBusiness" - }, - { - "@id": "schema:EmergencyService", - "@type": "rdfs:Class", - "rdfs:comment": "An emergency service, such as a fire station or ER.", - "rdfs:label": "EmergencyService", - "rdfs:subClassOf": "schema:LocalBusiness", - "children": [ - { - "@id": "schema:FireStation", - "@type": "rdfs:Class", - "rdfs:comment": "A fire station. 
With firemen.", - "rdfs:label": "FireStation", - "rdfs:subClassOf": [ - "schema:CivicStructure", - "schema:EmergencyService" - ] - }, - { - "@id": "schema:Hospital", - "@type": "rdfs:Class", - "rdfs:comment": "A hospital.", - "rdfs:label": "Hospital", - "rdfs:subClassOf": [ - "schema:CivicStructure", - "schema:EmergencyService", - "schema:MedicalOrganization" - ], - "properties": [ - { - "@id": "schema:availableService", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Hospital", - "schema:MedicalClinic", - "schema:Physician" - ], - "rangeIncludes": [ - "schema:MedicalProcedure", - "schema:MedicalTest", - "schema:MedicalTherapy" - ], - "rdfs:comment": "A medical service available from this provider.", - "rdfs:label": "availableService" - }, - { - "@id": "schema:medicalSpecialty", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Hospital", - "schema:MedicalClinic", - "schema:Physician" - ], - "rangeIncludes": "schema:MedicalSpecialty", - "rdfs:comment": "A medical specialty of the provider.", - "rdfs:label": "medicalSpecialty" - } - ] - }, - { - "@id": "schema:PoliceStation", - "@type": "rdfs:Class", - "rdfs:comment": "A police station.", - "rdfs:label": "PoliceStation", - "rdfs:subClassOf": [ - "schema:CivicStructure", - "schema:EmergencyService" - ] - } - ] - }, - { - "@id": "schema:EmploymentAgency", - "@type": "rdfs:Class", - "rdfs:comment": "An employment agency.", - "rdfs:label": "EmploymentAgency", - "rdfs:subClassOf": "schema:LocalBusiness" - }, - { - "@id": "schema:EntertainmentBusiness", - "@type": "rdfs:Class", - "rdfs:comment": "A business providing entertainment.", - "rdfs:label": "EntertainmentBusiness", - "rdfs:subClassOf": "schema:LocalBusiness", - "children": [ - { - "@id": "schema:AdultEntertainment", - "@type": "rdfs:Class", - "rdfs:comment": "An adult entertainment establishment.", - "rdfs:label": "AdultEntertainment", - "rdfs:subClassOf": "schema:EntertainmentBusiness" - }, - { - "@id": "schema:AmusementPark", - "@type": 
"rdfs:Class", - "rdfs:comment": "An amusement park.", - "rdfs:label": "AmusementPark", - "rdfs:subClassOf": "schema:EntertainmentBusiness" - }, - { - "@id": "schema:ArtGallery", - "@type": "rdfs:Class", - "rdfs:comment": "An art gallery.", - "rdfs:label": "ArtGallery", - "rdfs:subClassOf": "schema:EntertainmentBusiness" - }, - { - "@id": "schema:Casino", - "@type": "rdfs:Class", - "rdfs:comment": "A casino.", - "rdfs:label": "Casino", - "rdfs:subClassOf": "schema:EntertainmentBusiness" - }, - { - "@id": "schema:ComedyClub", - "@type": "rdfs:Class", - "rdfs:comment": "A comedy club.", - "rdfs:label": "ComedyClub", - "rdfs:subClassOf": "schema:EntertainmentBusiness" - }, - { - "@id": "schema:MovieTheater", - "@type": "rdfs:Class", - "rdfs:comment": "A movie theater.", - "rdfs:label": "MovieTheater", - "rdfs:subClassOf": [ - "schema:CivicStructure", - "schema:EntertainmentBusiness" - ] - }, - { - "@id": "schema:NightClub", - "@type": "rdfs:Class", - "rdfs:comment": "A nightclub or discotheque.", - "rdfs:label": "NightClub", - "rdfs:subClassOf": "schema:EntertainmentBusiness" - } - ] - }, - { - "@id": "schema:FinancialService", - "@type": "rdfs:Class", - "rdfs:comment": "Financial services business.", - "rdfs:label": "FinancialService", - "rdfs:subClassOf": "schema:LocalBusiness", - "children": [ - { - "@id": "schema:AccountingService", - "@type": "rdfs:Class", - "rdfs:comment": "Accountancy business.", - "rdfs:label": "AccountingService", - "rdfs:subClassOf": [ - "schema:FinancialService", - "schema:ProfessionalService" - ] - }, - { - "@id": "schema:AutomatedTeller", - "@type": "rdfs:Class", - "rdfs:comment": "ATM/cash machine.", - "rdfs:label": "AutomatedTeller", - "rdfs:subClassOf": "schema:FinancialService" - }, - { - "@id": "schema:BankOrCreditUnion", - "@type": "rdfs:Class", - "rdfs:comment": "Bank or credit union.", - "rdfs:label": "BankOrCreditUnion", - "rdfs:subClassOf": "schema:FinancialService" - }, - { - "@id": "schema:InsuranceAgency", - "@type": 
"rdfs:Class", - "rdfs:comment": "Insurance agency.", - "rdfs:label": "InsuranceAgency", - "rdfs:subClassOf": "schema:FinancialService" - } - ] - }, - { - "@id": "schema:FoodEstablishment", - "@type": "rdfs:Class", - "rdfs:comment": "A food-related business.", - "rdfs:label": "FoodEstablishment", - "rdfs:subClassOf": "schema:LocalBusiness", - "children": [ - { - "@id": "schema:Bakery", - "@type": "rdfs:Class", - "rdfs:comment": "A bakery.", - "rdfs:label": "Bakery", - "rdfs:subClassOf": "schema:FoodEstablishment" - }, - { - "@id": "schema:BarOrPub", - "@type": "rdfs:Class", - "rdfs:comment": "A bar or pub.", - "rdfs:label": "BarOrPub", - "rdfs:subClassOf": "schema:FoodEstablishment" - }, - { - "@id": "schema:Brewery", - "@type": "rdfs:Class", - "rdfs:comment": "Brewery.", - "rdfs:label": "Brewery", - "rdfs:subClassOf": "schema:FoodEstablishment" - }, - { - "@id": "schema:CafeOrCoffeeShop", - "@type": "rdfs:Class", - "rdfs:comment": "A cafe or coffee shop.", - "rdfs:label": "CafeOrCoffeeShop", - "rdfs:subClassOf": "schema:FoodEstablishment" - }, - { - "@id": "schema:FastFoodRestaurant", - "@type": "rdfs:Class", - "rdfs:comment": "A fast-food restaurant.", - "rdfs:label": "FastFoodRestaurant", - "rdfs:subClassOf": "schema:FoodEstablishment" - }, - { - "@id": "schema:IceCreamShop", - "@type": "rdfs:Class", - "rdfs:comment": "An ice cream shop", - "rdfs:label": "IceCreamShop", - "rdfs:subClassOf": "schema:FoodEstablishment" - }, - { - "@id": "schema:Restaurant", - "@type": "rdfs:Class", - "rdfs:comment": "A restaurant.", - "rdfs:label": "Restaurant", - "rdfs:subClassOf": "schema:FoodEstablishment" - }, - { - "@id": "schema:Winery", - "@type": "rdfs:Class", - "rdfs:comment": "A winery.", - "rdfs:label": "Winery", - "rdfs:subClassOf": "schema:FoodEstablishment" - } - ], - "properties": [ - { - "@id": "schema:acceptsReservations", - "@type": "rdf:Property", - "domainIncludes": "schema:FoodEstablishment", - "rangeIncludes": [ - "schema:Text", - "schema:URL", - 
"schema:Boolean" - ], - "rdfs:comment": "Indicates whether a FoodEstablishment accepts reservations. Values can be Boolean, an URL at which reservations can be made or (for backwards compatibility) the strings Yes or No.", - "rdfs:label": "acceptsReservations" - }, - { - "@id": "schema:menu", - "@type": "rdf:Property", - "domainIncludes": "schema:FoodEstablishment", - "rangeIncludes": [ - "schema:Text", - "schema:URL" - ], - "rdfs:comment": "Either the actual menu or a URL of the menu.", - "rdfs:label": "menu" - }, - { - "@id": "schema:servesCuisine", - "@type": "rdf:Property", - "domainIncludes": "schema:FoodEstablishment", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The cuisine of the restaurant.", - "rdfs:label": "servesCuisine" - } - ] - }, - { - "@id": "schema:GovernmentOffice", - "@type": "rdfs:Class", - "rdfs:comment": "A government office—for example, an IRS or DMV office.", - "rdfs:label": "GovernmentOffice", - "rdfs:subClassOf": "schema:LocalBusiness", - "children": [ - { - "@id": "schema:PostOffice", - "@type": "rdfs:Class", - "rdfs:comment": "A post office.", - "rdfs:label": "PostOffice", - "rdfs:subClassOf": "schema:GovernmentOffice" - } - ] - }, - { - "@id": "schema:HealthAndBeautyBusiness", - "@type": "rdfs:Class", - "rdfs:comment": "Health and beauty.", - "rdfs:label": "HealthAndBeautyBusiness", - "rdfs:subClassOf": "schema:LocalBusiness", - "children": [ - { - "@id": "schema:BeautySalon", - "@type": "rdfs:Class", - "rdfs:comment": "Beauty salon.", - "rdfs:label": "BeautySalon", - "rdfs:subClassOf": "schema:HealthAndBeautyBusiness" - }, - { - "@id": "schema:DaySpa", - "@type": "rdfs:Class", - "rdfs:comment": "A day spa.", - "rdfs:label": "DaySpa", - "rdfs:subClassOf": "schema:HealthAndBeautyBusiness" - }, - { - "@id": "schema:HairSalon", - "@type": "rdfs:Class", - "rdfs:comment": "A hair salon.", - "rdfs:label": "HairSalon", - "rdfs:subClassOf": "schema:HealthAndBeautyBusiness" - }, - { - "@id": "schema:HealthClub", - "@type": "rdfs:Class", 
- "rdfs:comment": "A health club.", - "rdfs:label": "HealthClub", - "rdfs:subClassOf": [ - "schema:HealthAndBeautyBusiness", - "schema:SportsActivityLocation" - ] - }, - { - "@id": "schema:NailSalon", - "@type": "rdfs:Class", - "rdfs:comment": "A nail salon.", - "rdfs:label": "NailSalon", - "rdfs:subClassOf": "schema:HealthAndBeautyBusiness" - }, - { - "@id": "schema:TattooParlor", - "@type": "rdfs:Class", - "rdfs:comment": "A tattoo parlor.", - "rdfs:label": "TattooParlor", - "rdfs:subClassOf": "schema:HealthAndBeautyBusiness" - } - ] - }, - { - "@id": "schema:HomeAndConstructionBusiness", - "@type": "rdfs:Class", - "rdfs:comment": "A construction business.", - "rdfs:label": "HomeAndConstructionBusiness", - "rdfs:subClassOf": "schema:LocalBusiness", - "children": [ - { - "@id": "schema:Electrician", - "@type": "rdfs:Class", - "rdfs:comment": "An electrician.", - "rdfs:label": "Electrician", - "rdfs:subClassOf": [ - "schema:HomeAndConstructionBusiness", - "schema:ProfessionalService" - ] - }, - { - "@id": "schema:GeneralContractor", - "@type": "rdfs:Class", - "rdfs:comment": "A general contractor.", - "rdfs:label": "GeneralContractor", - "rdfs:subClassOf": [ - "schema:HomeAndConstructionBusiness", - "schema:ProfessionalService" - ] - }, - { - "@id": "schema:HVACBusiness", - "@type": "rdfs:Class", - "rdfs:comment": "An HVAC service.", - "rdfs:label": "HVACBusiness", - "rdfs:subClassOf": "schema:HomeAndConstructionBusiness" - }, - { - "@id": "schema:HousePainter", - "@type": "rdfs:Class", - "rdfs:comment": "A house painting service.", - "rdfs:label": "HousePainter", - "rdfs:subClassOf": [ - "schema:HomeAndConstructionBusiness", - "schema:ProfessionalService" - ] - }, - { - "@id": "schema:Locksmith", - "@type": "rdfs:Class", - "rdfs:comment": "A locksmith.", - "rdfs:label": "Locksmith", - "rdfs:subClassOf": [ - "schema:HomeAndConstructionBusiness", - "schema:ProfessionalService" - ] - }, - { - "@id": "schema:MovingCompany", - "@type": "rdfs:Class", - "rdfs:comment": 
"A moving company.", - "rdfs:label": "MovingCompany", - "rdfs:subClassOf": "schema:HomeAndConstructionBusiness" - }, - { - "@id": "schema:Plumber", - "@type": "rdfs:Class", - "rdfs:comment": "A plumbing service.", - "rdfs:label": "Plumber", - "rdfs:subClassOf": [ - "schema:HomeAndConstructionBusiness", - "schema:ProfessionalService" - ] - }, - { - "@id": "schema:RoofingContractor", - "@type": "rdfs:Class", - "rdfs:comment": "A roofing contractor.", - "rdfs:label": "RoofingContractor", - "rdfs:subClassOf": [ - "schema:HomeAndConstructionBusiness", - "schema:ProfessionalService" - ] - } - ] - }, - { - "@id": "schema:InternetCafe", - "@type": "rdfs:Class", - "rdfs:comment": "An internet cafe.", - "rdfs:label": "InternetCafe", - "rdfs:subClassOf": "schema:LocalBusiness" - }, - { - "@id": "schema:Library", - "@type": "rdfs:Class", - "rdfs:comment": "A library.", - "rdfs:label": "Library", - "rdfs:subClassOf": "schema:LocalBusiness" - }, - { - "@id": "schema:LodgingBusiness", - "@type": "rdfs:Class", - "rdfs:comment": "A lodging business, such as a motel, hotel, or inn.", - "rdfs:label": "LodgingBusiness", - "rdfs:subClassOf": "schema:LocalBusiness", - "children": [ - { - "@id": "schema:BedAndBreakfast", - "@type": "rdfs:Class", - "rdfs:comment": "Bed and breakfast.", - "rdfs:label": "BedAndBreakfast", - "rdfs:subClassOf": "schema:LodgingBusiness" - }, - { - "@id": "schema:Hostel", - "@type": "rdfs:Class", - "rdfs:comment": "A hostel.", - "rdfs:label": "Hostel", - "rdfs:subClassOf": "schema:LodgingBusiness" - }, - { - "@id": "schema:Hotel", - "@type": "rdfs:Class", - "rdfs:comment": "A hotel.", - "rdfs:label": "Hotel", - "rdfs:subClassOf": "schema:LodgingBusiness" - }, - { - "@id": "schema:Motel", - "@type": "rdfs:Class", - "rdfs:comment": "A motel.", - "rdfs:label": "Motel", - "rdfs:subClassOf": "schema:LodgingBusiness" - } - ] - }, - { - "@id": "schema:MedicalOrganization", - "@type": "rdfs:Class", - "rdfs:comment": "A medical organization, such as a doctor's office or 
clinic.", - "rdfs:label": "MedicalOrganization", - "rdfs:subClassOf": "schema:LocalBusiness", - "children": [ - { - "@id": "schema:Dentist", - "@type": "rdfs:Class", - "rdfs:comment": "A dentist.", - "rdfs:label": "Dentist", - "rdfs:subClassOf": [ - "schema:MedicalOrganization", - "schema:ProfessionalService" - ] - }, - { - "@id": "schema:DiagnosticLab", - "@type": "rdfs:Class", - "rdfs:comment": "A medical laboratory that offers on-site or off-site diagnostic services.", - "rdfs:label": "DiagnosticLab", - "rdfs:subClassOf": "schema:MedicalOrganization", - "properties": [ - { - "@id": "schema:availableTest", - "@type": "rdf:Property", - "domainIncludes": "schema:DiagnosticLab", - "rangeIncludes": "schema:MedicalTest", - "rdfs:comment": "A diagnostic test or procedure offered by this lab.", - "rdfs:label": "availableTest" - } - ] - }, - { - "@id": "schema:Hospital", - "@type": "rdfs:Class", - "rdfs:comment": "A hospital.", - "rdfs:label": "Hospital", - "rdfs:subClassOf": [ - "schema:CivicStructure", - "schema:EmergencyService", - "schema:MedicalOrganization" - ], - "properties": [ - { - "@id": "schema:availableService", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Hospital", - "schema:MedicalClinic", - "schema:Physician" - ], - "rangeIncludes": [ - "schema:MedicalProcedure", - "schema:MedicalTest", - "schema:MedicalTherapy" - ], - "rdfs:comment": "A medical service available from this provider.", - "rdfs:label": "availableService" - }, - { - "@id": "schema:medicalSpecialty", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Hospital", - "schema:MedicalClinic", - "schema:Physician" - ], - "rangeIncludes": "schema:MedicalSpecialty", - "rdfs:comment": "A medical specialty of the provider.", - "rdfs:label": "medicalSpecialty" - } - ] - }, - { - "@id": "schema:MedicalClinic", - "@type": "rdfs:Class", - "rdfs:comment": "A medical clinic.", - "rdfs:label": "MedicalClinic", - "rdfs:subClassOf": "schema:MedicalOrganization", - "properties": [ - { - 
"@id": "schema:availableService", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Hospital", - "schema:MedicalClinic", - "schema:Physician" - ], - "rangeIncludes": [ - "schema:MedicalProcedure", - "schema:MedicalTest", - "schema:MedicalTherapy" - ], - "rdfs:comment": "A medical service available from this provider.", - "rdfs:label": "availableService" - }, - { - "@id": "schema:medicalSpecialty", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Hospital", - "schema:MedicalClinic", - "schema:Physician" - ], - "rangeIncludes": "schema:MedicalSpecialty", - "rdfs:comment": "A medical specialty of the provider.", - "rdfs:label": "medicalSpecialty" - } - ] - }, - { - "@id": "schema:Optician", - "@type": "rdfs:Class", - "rdfs:comment": "An optician's store.", - "rdfs:label": "Optician", - "rdfs:subClassOf": "schema:MedicalOrganization" - }, - { - "@id": "schema:Pharmacy", - "@type": "rdfs:Class", - "rdfs:comment": "A pharmacy or drugstore.", - "rdfs:label": "Pharmacy", - "rdfs:subClassOf": "schema:MedicalOrganization" - }, - { - "@id": "schema:Physician", - "@type": "rdfs:Class", - "rdfs:comment": "A doctor's office.", - "rdfs:label": "Physician", - "rdfs:subClassOf": "schema:MedicalOrganization", - "properties": [ - { - "@id": "schema:availableService", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Hospital", - "schema:MedicalClinic", - "schema:Physician" - ], - "rangeIncludes": [ - "schema:MedicalProcedure", - "schema:MedicalTest", - "schema:MedicalTherapy" - ], - "rdfs:comment": "A medical service available from this provider.", - "rdfs:label": "availableService" - }, - { - "@id": "schema:hospitalAffiliation", - "@type": "rdf:Property", - "domainIncludes": "schema:Physician", - "rangeIncludes": "schema:Hospital", - "rdfs:comment": "A hospital with which the physician or office is affiliated.", - "rdfs:label": "hospitalAffiliation" - }, - { - "@id": "schema:medicalSpecialty", - "@type": "rdf:Property", - "domainIncludes": [ - 
"schema:Hospital", - "schema:MedicalClinic", - "schema:Physician" - ], - "rangeIncludes": "schema:MedicalSpecialty", - "rdfs:comment": "A medical specialty of the provider.", - "rdfs:label": "medicalSpecialty" - } - ] - }, - { - "@id": "schema:VeterinaryCare", - "@type": "rdfs:Class", - "rdfs:comment": "A vet's office.", - "rdfs:label": "VeterinaryCare", - "rdfs:subClassOf": "schema:MedicalOrganization" - } - ] - }, - { - "@id": "schema:ProfessionalService", - "@type": "rdfs:Class", - "rdfs:comment": "Provider of professional services.", - "rdfs:label": "ProfessionalService", - "rdfs:subClassOf": "schema:LocalBusiness", - "children": [ - { - "@id": "schema:AccountingService", - "@type": "rdfs:Class", - "rdfs:comment": "Accountancy business.", - "rdfs:label": "AccountingService", - "rdfs:subClassOf": [ - "schema:FinancialService", - "schema:ProfessionalService" - ] - }, - { - "@id": "schema:Attorney", - "@type": "rdfs:Class", - "rdfs:comment": "Professional service: Attorney.", - "rdfs:label": "Attorney", - "rdfs:subClassOf": "schema:ProfessionalService" - }, - { - "@id": "schema:Dentist", - "@type": "rdfs:Class", - "rdfs:comment": "A dentist.", - "rdfs:label": "Dentist", - "rdfs:subClassOf": [ - "schema:MedicalOrganization", - "schema:ProfessionalService" - ] - }, - { - "@id": "schema:Electrician", - "@type": "rdfs:Class", - "rdfs:comment": "An electrician.", - "rdfs:label": "Electrician", - "rdfs:subClassOf": [ - "schema:HomeAndConstructionBusiness", - "schema:ProfessionalService" - ] - }, - { - "@id": "schema:GeneralContractor", - "@type": "rdfs:Class", - "rdfs:comment": "A general contractor.", - "rdfs:label": "GeneralContractor", - "rdfs:subClassOf": [ - "schema:HomeAndConstructionBusiness", - "schema:ProfessionalService" - ] - }, - { - "@id": "schema:HousePainter", - "@type": "rdfs:Class", - "rdfs:comment": "A house painting service.", - "rdfs:label": "HousePainter", - "rdfs:subClassOf": [ - "schema:HomeAndConstructionBusiness", - "schema:ProfessionalService" 
- ] - }, - { - "@id": "schema:Locksmith", - "@type": "rdfs:Class", - "rdfs:comment": "A locksmith.", - "rdfs:label": "Locksmith", - "rdfs:subClassOf": [ - "schema:HomeAndConstructionBusiness", - "schema:ProfessionalService" - ] - }, - { - "@id": "schema:Notary", - "@type": "rdfs:Class", - "rdfs:comment": "A notary.", - "rdfs:label": "Notary", - "rdfs:subClassOf": "schema:ProfessionalService" - }, - { - "@id": "schema:Plumber", - "@type": "rdfs:Class", - "rdfs:comment": "A plumbing service.", - "rdfs:label": "Plumber", - "rdfs:subClassOf": [ - "schema:HomeAndConstructionBusiness", - "schema:ProfessionalService" - ] - }, - { - "@id": "schema:RoofingContractor", - "@type": "rdfs:Class", - "rdfs:comment": "A roofing contractor.", - "rdfs:label": "RoofingContractor", - "rdfs:subClassOf": [ - "schema:HomeAndConstructionBusiness", - "schema:ProfessionalService" - ] - } - ] - }, - { - "@id": "schema:RadioStation", - "@type": "rdfs:Class", - "rdfs:comment": "A radio station.", - "rdfs:label": "RadioStation", - "rdfs:subClassOf": "schema:LocalBusiness" - }, - { - "@id": "schema:RealEstateAgent", - "@type": "rdfs:Class", - "rdfs:comment": "A real-estate agent.", - "rdfs:label": "RealEstateAgent", - "rdfs:subClassOf": "schema:LocalBusiness" - }, - { - "@id": "schema:RecyclingCenter", - "@type": "rdfs:Class", - "rdfs:comment": "A recycling center.", - "rdfs:label": "RecyclingCenter", - "rdfs:subClassOf": "schema:LocalBusiness" - }, - { - "@id": "schema:SelfStorage", - "@type": "rdfs:Class", - "rdfs:comment": "Self-storage facility.", - "rdfs:label": "SelfStorage", - "rdfs:subClassOf": "schema:LocalBusiness" - }, - { - "@id": "schema:ShoppingCenter", - "@type": "rdfs:Class", - "rdfs:comment": "A shopping center or mall.", - "rdfs:label": "ShoppingCenter", - "rdfs:subClassOf": "schema:LocalBusiness" - }, - { - "@id": "schema:SportsActivityLocation", - "@type": "rdfs:Class", - "rdfs:comment": "A sports location, such as a playing field.", - "rdfs:label": "SportsActivityLocation", 
- "rdfs:subClassOf": "schema:LocalBusiness", - "children": [ - { - "@id": "schema:BowlingAlley", - "@type": "rdfs:Class", - "rdfs:comment": "A bowling alley.", - "rdfs:label": "BowlingAlley", - "rdfs:subClassOf": "schema:SportsActivityLocation" - }, - { - "@id": "schema:ExerciseGym", - "@type": "rdfs:Class", - "rdfs:comment": "A gym.", - "rdfs:label": "ExerciseGym", - "rdfs:subClassOf": "schema:SportsActivityLocation" - }, - { - "@id": "schema:GolfCourse", - "@type": "rdfs:Class", - "rdfs:comment": "A golf course.", - "rdfs:label": "GolfCourse", - "rdfs:subClassOf": "schema:SportsActivityLocation" - }, - { - "@id": "schema:HealthClub", - "@type": "rdfs:Class", - "rdfs:comment": "A health club.", - "rdfs:label": "HealthClub", - "rdfs:subClassOf": [ - "schema:HealthAndBeautyBusiness", - "schema:SportsActivityLocation" - ] - }, - { - "@id": "schema:PublicSwimmingPool", - "@type": "rdfs:Class", - "rdfs:comment": "A public swimming pool.", - "rdfs:label": "PublicSwimmingPool", - "rdfs:subClassOf": "schema:SportsActivityLocation" - }, - { - "@id": "schema:SkiResort", - "@type": "rdfs:Class", - "rdfs:comment": "A ski resort.", - "rdfs:label": "SkiResort", - "rdfs:subClassOf": "schema:SportsActivityLocation" - }, - { - "@id": "schema:SportsClub", - "@type": "rdfs:Class", - "rdfs:comment": "A sports club.", - "rdfs:label": "SportsClub", - "rdfs:subClassOf": "schema:SportsActivityLocation" - }, - { - "@id": "schema:StadiumOrArena", - "@type": "rdfs:Class", - "rdfs:comment": "A stadium.", - "rdfs:label": "StadiumOrArena", - "rdfs:subClassOf": [ - "schema:CivicStructure", - "schema:SportsActivityLocation" - ] - }, - { - "@id": "schema:TennisComplex", - "@type": "rdfs:Class", - "rdfs:comment": "A tennis complex.", - "rdfs:label": "TennisComplex", - "rdfs:subClassOf": "schema:SportsActivityLocation" - } - ] - }, - { - "@id": "schema:Store", - "@type": "rdfs:Class", - "rdfs:comment": "A retail good store.", - "rdfs:label": "Store", - "rdfs:subClassOf": "schema:LocalBusiness", - 
"children": [ - { - "@id": "schema:AutoPartsStore", - "@type": "rdfs:Class", - "rdfs:comment": "An auto parts store.", - "rdfs:label": "AutoPartsStore", - "rdfs:subClassOf": [ - "schema:AutomotiveBusiness", - "schema:Store" - ] - }, - { - "@id": "schema:BikeStore", - "@type": "rdfs:Class", - "rdfs:comment": "A bike store.", - "rdfs:label": "BikeStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:BookStore", - "@type": "rdfs:Class", - "rdfs:comment": "A bookstore.", - "rdfs:label": "BookStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:ClothingStore", - "@type": "rdfs:Class", - "rdfs:comment": "A clothing store.", - "rdfs:label": "ClothingStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:ComputerStore", - "@type": "rdfs:Class", - "rdfs:comment": "A computer store.", - "rdfs:label": "ComputerStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:ConvenienceStore", - "@type": "rdfs:Class", - "rdfs:comment": "A convenience store.", - "rdfs:label": "ConvenienceStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:DepartmentStore", - "@type": "rdfs:Class", - "rdfs:comment": "A department store.", - "rdfs:label": "DepartmentStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:ElectronicsStore", - "@type": "rdfs:Class", - "rdfs:comment": "An electronics store.", - "rdfs:label": "ElectronicsStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:Florist", - "@type": "rdfs:Class", - "rdfs:comment": "A florist.", - "rdfs:label": "Florist", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:FurnitureStore", - "@type": "rdfs:Class", - "rdfs:comment": "A furniture store.", - "rdfs:label": "FurnitureStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:GardenStore", - "@type": "rdfs:Class", - "rdfs:comment": "A garden store.", - "rdfs:label": "GardenStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:GroceryStore", - 
"@type": "rdfs:Class", - "rdfs:comment": "A grocery store.", - "rdfs:label": "GroceryStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:HardwareStore", - "@type": "rdfs:Class", - "rdfs:comment": "A hardware store.", - "rdfs:label": "HardwareStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:HobbyShop", - "@type": "rdfs:Class", - "rdfs:comment": "A hobby store.", - "rdfs:label": "HobbyShop", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:HomeGoodsStore", - "@type": "rdfs:Class", - "rdfs:comment": "A home goods store.", - "rdfs:label": "HomeGoodsStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:JewelryStore", - "@type": "rdfs:Class", - "rdfs:comment": "A jewelry store.", - "rdfs:label": "JewelryStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:LiquorStore", - "@type": "rdfs:Class", - "rdfs:comment": "A liquor store.", - "rdfs:label": "LiquorStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:MensClothingStore", - "@type": "rdfs:Class", - "rdfs:comment": "A men's clothing store.", - "rdfs:label": "MensClothingStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:MobilePhoneStore", - "@type": "rdfs:Class", - "rdfs:comment": "A mobile-phone store.", - "rdfs:label": "MobilePhoneStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:MovieRentalStore", - "@type": "rdfs:Class", - "rdfs:comment": "A movie rental store.", - "rdfs:label": "MovieRentalStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:MusicStore", - "@type": "rdfs:Class", - "rdfs:comment": "A music store.", - "rdfs:label": "MusicStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:OfficeEquipmentStore", - "@type": "rdfs:Class", - "rdfs:comment": "An office equipment store.", - "rdfs:label": "OfficeEquipmentStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:OutletStore", - "@type": "rdfs:Class", - "rdfs:comment": "An 
outlet store.", - "rdfs:label": "OutletStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:PawnShop", - "@type": "rdfs:Class", - "rdfs:comment": "A pawn store.", - "rdfs:label": "PawnShop", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:PetStore", - "@type": "rdfs:Class", - "rdfs:comment": "A pet store.", - "rdfs:label": "PetStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:ShoeStore", - "@type": "rdfs:Class", - "rdfs:comment": "A shoe store.", - "rdfs:label": "ShoeStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:SportingGoodsStore", - "@type": "rdfs:Class", - "rdfs:comment": "A sporting goods store.", - "rdfs:label": "SportingGoodsStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:TireShop", - "@type": "rdfs:Class", - "rdfs:comment": "A tire shop.", - "rdfs:label": "TireShop", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:ToyStore", - "@type": "rdfs:Class", - "rdfs:comment": "A toy store.", - "rdfs:label": "ToyStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:WholesaleStore", - "@type": "rdfs:Class", - "rdfs:comment": "A wholesale store.", - "rdfs:label": "WholesaleStore", - "rdfs:subClassOf": "schema:Store" - } - ] - }, - { - "@id": "schema:TelevisionStation", - "@type": "rdfs:Class", - "rdfs:comment": "A television station.", - "rdfs:label": "TelevisionStation", - "rdfs:subClassOf": "schema:LocalBusiness" - }, - { - "@id": "schema:TouristInformationCenter", - "@type": "rdfs:Class", - "rdfs:comment": "A tourist information center.", - "rdfs:label": "TouristInformationCenter", - "rdfs:subClassOf": "schema:LocalBusiness" - }, - { - "@id": "schema:TravelAgency", - "@type": "rdfs:Class", - "rdfs:comment": "A travel agency.", - "rdfs:label": "TravelAgency", - "rdfs:subClassOf": "schema:LocalBusiness" - } - ], - "properties": [ - { - "@id": "schema:branchOf", - "@type": "rdf:Property", - "domainIncludes": "schema:LocalBusiness", - 
"rangeIncludes": "schema:Organization", - "rdfs:comment": "The larger organization that this local business is a branch of, if any.", - "rdfs:label": "branchOf" - }, - { - "@id": "schema:currenciesAccepted", - "@type": "rdf:Property", - "domainIncludes": "schema:LocalBusiness", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The currency accepted (in ISO 4217 currency format).", - "rdfs:label": "currenciesAccepted" - }, - { - "@id": "schema:openingHours", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:LocalBusiness", - "schema:CivicStructure" - ], - "rangeIncludes": "schema:Duration", - "rdfs:comment": "The opening hours for a business. Opening hours can be specified as a weekly time range, starting with days, then times per day. Multiple days can be listed with commas ',' separating each day. Day or time ranges are specified using a hyphen '-'.
- Days are specified using the following two-letter combinations: Mo, Tu, We, Th, Fr, Sa, Su.
- Times are specified using 24:00 time. For example, 3pm is specified as 15:00.
- Here is an example: <time itemprop="openingHours" datetime="Tu,Th 16:00-20:00">Tuesdays and Thursdays 4-8pm</time>.
- If a business is open 7 days a week, then it can be specified as <time itemprop="openingHours" datetime="Mo-Su">Monday through Sunday, all day</time>.", - "rdfs:label": "openingHours" - }, - { - "@id": "schema:paymentAccepted", - "@type": "rdf:Property", - "domainIncludes": "schema:LocalBusiness", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Cash, credit card, etc.", - "rdfs:label": "paymentAccepted" - }, - { - "@id": "schema:priceRange", - "@type": "rdf:Property", - "domainIncludes": "schema:LocalBusiness", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The price range of the business, for example $$$.", - "rdfs:label": "priceRange" - } - ] - }, - { - "@id": "schema:NGO", - "@type": "rdfs:Class", - "rdfs:comment": "Organization: Non-governmental Organization.", - "rdfs:label": "NGO", - "rdfs:subClassOf": "schema:Organization" - }, - { - "@id": "schema:PerformingGroup", - "@type": "rdfs:Class", - "rdfs:comment": "A performance group, such as a band, an orchestra, or a circus.", - "rdfs:label": "PerformingGroup", - "rdfs:subClassOf": "schema:Organization", - "children": [ - { - "@id": "schema:DanceGroup", - "@type": "rdfs:Class", - "rdfs:comment": "A dance group—for example, the Alvin Ailey Dance Theater or Riverdance.", - "rdfs:label": "DanceGroup", - "rdfs:subClassOf": "schema:PerformingGroup" - }, - { - "@id": "schema:MusicGroup", - "@type": "rdfs:Class", - "rdfs:comment": "A musical group, such as a band, an orchestra, or a choir. 
Can also be a solo musician.", - "rdfs:label": "MusicGroup", - "rdfs:subClassOf": "schema:PerformingGroup", - "properties": [ - { - "@id": "schema:album", - "@type": "rdf:Property", - "domainIncludes": "schema:MusicGroup", - "rangeIncludes": "schema:MusicAlbum", - "rdfs:comment": "A music album.", - "rdfs:label": "album" - }, - { - "@id": "schema:albums", - "@type": "rdf:Property", - "domainIncludes": "schema:MusicGroup", - "rangeIncludes": "schema:MusicAlbum", - "rdfs:comment": "A collection of music albums (legacy spelling; see singular form, album).", - "rdfs:label": "albums", - "supercededBy": "schema:album" - }, - { - "@id": "schema:musicGroupMember", - "@type": "rdf:Property", - "domainIncludes": "schema:MusicGroup", - "rangeIncludes": "schema:Person", - "rdfs:comment": "A member of a music group—for example, John, Paul, George, or Ringo.", - "rdfs:label": "musicGroupMember", - "supercededBy": "schema:member" - }, - { - "@id": "schema:track", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MusicPlaylist", - "schema:MusicGroup" - ], - "rangeIncludes": "schema:MusicRecording", - "rdfs:comment": "A music recording (track)—usually a single song.", - "rdfs:label": "track" - }, - { - "@id": "schema:tracks", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MusicPlaylist", - "schema:MusicGroup" - ], - "rangeIncludes": "schema:MusicRecording", - "rdfs:comment": "A music recording (track)—usually a single song (legacy spelling; see singular form, track).", - "rdfs:label": "tracks", - "supercededBy": "schema:track" - } - ] - }, - { - "@id": "schema:TheaterGroup", - "@type": "rdfs:Class", - "rdfs:comment": "A theater group or company—for example, the Royal Shakespeare Company or Druid Theatre.", - "rdfs:label": "TheaterGroup", - "rdfs:subClassOf": "schema:PerformingGroup" - } - ] - }, - { - "@id": "schema:SportsTeam", - "@type": "rdfs:Class", - "rdfs:comment": "Organization: Sports team.", - "rdfs:label": "SportsTeam", - "rdfs:subClassOf": 
"schema:Organization" - } - ], - "properties": [ - { - "@id": "schema:address", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place", - "schema:Person" - ], - "rangeIncludes": "schema:PostalAddress", - "rdfs:comment": "Physical address of the item.", - "rdfs:label": "address" - }, - { - "@id": "schema:aggregateRating", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Organization", - "schema:Place", - "schema:Offer", - "schema:Product" - ], - "rangeIncludes": "schema:AggregateRating", - "rdfs:comment": "The overall rating, based on a collection of reviews or ratings, of the item.", - "rdfs:label": "aggregateRating" - }, - { - "@id": "schema:brand", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Person", - "schema:Product" - ], - "rangeIncludes": [ - "schema:Brand", - "schema:Organization" - ], - "rdfs:comment": "The brand(s) associated with a product or service, or the brand(s) maintained by an organization or business person.", - "rdfs:label": "brand" - }, - { - "@id": "schema:contactPoint", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rangeIncludes": "schema:ContactPoint", - "rdfs:comment": "A contact point for a person or organization.", - "rdfs:label": "contactPoint" - }, - { - "@id": "schema:contactPoints", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rangeIncludes": "schema:ContactPoint", - "rdfs:comment": "A contact point for a person or organization (legacy spelling; see singular form, contactPoint).", - "rdfs:label": "contactPoints", - "supercededBy": "schema:contactPoint" - }, - { - "@id": "schema:department", - "@type": "rdf:Property", - "domainIncludes": "schema:Organization", - "rangeIncludes": "schema:Organization", - "rdfs:comment": "A relationship between an organization and a department of that organization, also described as an 
organization (allowing different urls, logos, opening hours). For example: a store with a pharmacy, or a bakery with a cafe.", - "rdfs:label": "department" - }, - { - "@id": "schema:dissolutionDate", - "@type": "rdf:Property", - "domainIncludes": "schema:Organization", - "rangeIncludes": "schema:Date", - "rdfs:comment": "The date that this organization was dissolved.", - "rdfs:label": "dissolutionDate" - }, - { - "@id": "schema:duns", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The Dun & Bradstreet DUNS number for identifying an organization or business person.", - "rdfs:label": "duns" - }, - { - "@id": "schema:email", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:ContactPoint", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "Email address.", - "rdfs:label": "email" - }, - { - "@id": "schema:employee", - "@type": "rdf:Property", - "domainIncludes": "schema:Organization", - "rangeIncludes": "schema:Person", - "rdfs:comment": "Someone working for this organization.", - "rdfs:label": "employee" - }, - { - "@id": "schema:employees", - "@type": "rdf:Property", - "domainIncludes": "schema:Organization", - "rangeIncludes": "schema:Person", - "rdfs:comment": "People working for this organization. 
(legacy spelling; see singular form, employee)", - "rdfs:label": "employees", - "supercededBy": "schema:employee" - }, - { - "@id": "schema:event", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place", - "schema:InformAction", - "schema:PlayAction", - "schema:InviteAction", - "schema:JoinAction", - "schema:LeaveAction" - ], - "rangeIncludes": "schema:Event", - "rdfs:comment": "Upcoming or past event associated with this place or organization.", - "rdfs:label": "event" - }, - { - "@id": "schema:events", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place" - ], - "rangeIncludes": "schema:Event", - "rdfs:comment": "Upcoming or past events associated with this place or organization (legacy spelling; see singular form, event).", - "rdfs:label": "events", - "supercededBy": "schema:event" - }, - { - "@id": "schema:faxNumber", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place", - "schema:ContactPoint", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The fax number.", - "rdfs:label": "faxNumber" - }, - { - "@id": "schema:founder", - "@type": "rdf:Property", - "domainIncludes": "schema:Organization", - "rangeIncludes": "schema:Person", - "rdfs:comment": "A person who founded this organization.", - "rdfs:label": "founder" - }, - { - "@id": "schema:founders", - "@type": "rdf:Property", - "domainIncludes": "schema:Organization", - "rangeIncludes": "schema:Person", - "rdfs:comment": "A person who founded this organization (legacy spelling; see singular form, founder).", - "rdfs:label": "founders", - "supercededBy": "schema:founder" - }, - { - "@id": "schema:foundingDate", - "@type": "rdf:Property", - "domainIncludes": "schema:Organization", - "rangeIncludes": "schema:Date", - "rdfs:comment": "The date that this organization was founded.", - "rdfs:label": "foundingDate" - }, - { - "@id": "schema:globalLocationNumber", - "@type": 
"rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The Global Location Number (GLN, sometimes also referred to as International Location Number or ILN) of the respective organization, person, or place. The GLN is a 13-digit number used to identify parties and physical locations.", - "rdfs:label": "globalLocationNumber" - }, - { - "@id": "schema:hasPOS", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rangeIncludes": "schema:Place", - "rdfs:comment": "Points-of-Sales operated by the organization or person.", - "rdfs:label": "hasPOS" - }, - { - "@id": "schema:interactionCount", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Organization", - "schema:Place", - "schema:MediaObject", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "A count of a specific user interactions with this item—for example, 20 UserLikes, 5 UserComments, or 300 UserDownloads. The user interaction type should be one of the sub types of UserInteraction.", - "rdfs:label": "interactionCount" - }, - { - "@id": "schema:isicV4", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The International Standard of Industrial Classification of All Economic Activities (ISIC), Revision 4 code for a particular organization, business person, or place.", - "rdfs:label": "isicV4" - }, - { - "@id": "schema:legalName", - "@type": "rdf:Property", - "domainIncludes": "schema:Organization", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The official name of the organization, e.g. 
the registered company name.", - "rdfs:label": "legalName" - }, - { - "@id": "schema:location", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Event", - "schema:Action" - ], - "rangeIncludes": [ - "schema:Place", - "schema:PostalAddress" - ], - "rdfs:comment": "The location of the event, organization or action.", - "rdfs:label": "location" - }, - { - "@id": "schema:logo", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place", - "schema:Product", - "schema:Brand" - ], - "rangeIncludes": [ - "schema:ImageObject", - "schema:URL" - ], - "rdfs:comment": "A logo associated with an organization.", - "rdfs:label": "logo" - }, - { - "@id": "schema:makesOffer", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rangeIncludes": "schema:Offer", - "rdfs:comment": "A pointer to products or services offered by the organization or person.", - "rdfs:label": "makesOffer" - }, - { - "@id": "schema:member", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:ProgramMembership" - ], - "inverseOf": "schema:memberOf", - "rangeIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "A member of an Organization or a ProgramMembership. 
Organizations can be members of organizations; ProgramMembership is typically for individuals.", - "rdfs:label": "member" - }, - { - "@id": "schema:memberOf", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Person", - "schema:Organization" - ], - "inverseOf": "schema:member", - "rangeIncludes": [ - "schema:Organization", - "schema:ProgramMembership" - ], - "rdfs:comment": "An Organization (or ProgramMembership) to which this Person or Organization belongs.", - "rdfs:label": "memberOf" - }, - { - "@id": "schema:members", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:ProgramMembership" - ], - "rangeIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rdfs:comment": "A member of this organization (legacy spelling; see singular form, member).", - "rdfs:label": "members", - "supercededBy": "schema:member" - }, - { - "@id": "schema:naics", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The North American Industry Classification System (NAICS) code for a particular organization or business person.", - "rdfs:label": "naics" - }, - { - "@id": "schema:owns", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rangeIncludes": [ - "schema:OwnershipInfo", - "schema:Product" - ], - "rdfs:comment": "Products owned by the organization or person.", - "rdfs:label": "owns" - }, - { - "@id": "schema:review", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Organization", - "schema:Place", - "schema:Offer", - "schema:Product" - ], - "rangeIncludes": "schema:Review", - "rdfs:comment": "A review of the item.", - "rdfs:label": "review" - }, - { - "@id": "schema:reviews", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Organization", - "schema:Place", - "schema:Offer", - "schema:Product" - ], - "rangeIncludes": 
"schema:Review", - "rdfs:comment": "Review of the item (legacy spelling; see singular form, review).", - "rdfs:label": "reviews", - "supercededBy": "schema:review" - }, - { - "@id": "schema:seeks", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rangeIncludes": "schema:Demand", - "rdfs:comment": "A pointer to products or services sought by the organization or person (demand).", - "rdfs:label": "seeks" - }, - { - "@id": "schema:subOrganization", - "@type": "rdf:Property", - "domainIncludes": "schema:Organization", - "rangeIncludes": "schema:Organization", - "rdfs:comment": "A relationship between two organizations where the first includes the second, e.g., as a subsidiary. See also: the more specific 'department' property.", - "rdfs:label": "subOrganization" - }, - { - "@id": "schema:taxID", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The Tax / Fiscal ID of the organization or person, e.g. 
the TIN in the US or the CIF/NIF in Spain.", - "rdfs:label": "taxID" - }, - { - "@id": "schema:telephone", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place", - "schema:ContactPoint", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The telephone number.", - "rdfs:label": "telephone" - }, - { - "@id": "schema:vatID", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The Value-added Tax ID of the organization or person.", - "rdfs:label": "vatID" - } - ] - }, - { - "@id": "schema:Person", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_rNews" - }, - "owl:equivalentClass": "foaf:Person", - "rdfs:comment": "A person (alive, dead, undead, or fictional).", - "rdfs:label": "Person", - "rdfs:subClassOf": "schema:Thing", - "properties": [ - { - "@id": "schema:additionalName", - "@type": "rdf:Property", - "domainIncludes": "schema:Person", - "rangeIncludes": "schema:Text", - "rdfs:comment": "An additional name for a Person, can be used for a middle name.", - "rdfs:label": "additionalName" - }, - { - "@id": "schema:address", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place", - "schema:Person" - ], - "rangeIncludes": "schema:PostalAddress", - "rdfs:comment": "Physical address of the item.", - "rdfs:label": "address" - }, - { - "@id": "schema:affiliation", - "@type": "rdf:Property", - "domainIncludes": "schema:Person", - "rangeIncludes": "schema:Organization", - "rdfs:comment": "An organization that this person is affiliated with. 
For example, a school/university, a club, or a team.", - "rdfs:label": "affiliation" - }, - { - "@id": "schema:alumniOf", - "@type": "rdf:Property", - "domainIncludes": "schema:Person", - "inverseOf": "schema:alumni", - "rangeIncludes": "schema:EducationalOrganization", - "rdfs:comment": "An educational organizations that the person is an alumni of.", - "rdfs:label": "alumniOf" - }, - { - "@id": "schema:award", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "An award won by this person or for this creative work.", - "rdfs:label": "award" - }, - { - "@id": "schema:awards", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "Awards won by this person or for this creative work. (legacy spelling; see singular form, award)", - "rdfs:label": "awards", - "supercededBy": "schema:award" - }, - { - "@id": "schema:birthDate", - "@type": "rdf:Property", - "domainIncludes": "schema:Person", - "rangeIncludes": "schema:Date", - "rdfs:comment": "Date of birth.", - "rdfs:label": "birthDate" - }, - { - "@id": "schema:brand", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Person", - "schema:Product" - ], - "rangeIncludes": [ - "schema:Brand", - "schema:Organization" - ], - "rdfs:comment": "The brand(s) associated with a product or service, or the brand(s) maintained by an organization or business person.", - "rdfs:label": "brand" - }, - { - "@id": "schema:children", - "@type": "rdf:Property", - "domainIncludes": "schema:Person", - "rangeIncludes": "schema:Person", - "rdfs:comment": "A child of the person.", - "rdfs:label": "children" - }, - { - "@id": "schema:colleague", - "@type": "rdf:Property", - "domainIncludes": "schema:Person", - "rangeIncludes": "schema:Person", - "rdfs:comment": "A colleague of the person.", - "rdfs:label": "colleague" - }, - 
{ - "@id": "schema:colleagues", - "@type": "rdf:Property", - "domainIncludes": "schema:Person", - "rangeIncludes": "schema:Person", - "rdfs:comment": "A colleague of the person (legacy spelling; see singular form, colleague).", - "rdfs:label": "colleagues", - "supercededBy": "schema:colleague" - }, - { - "@id": "schema:contactPoint", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rangeIncludes": "schema:ContactPoint", - "rdfs:comment": "A contact point for a person or organization.", - "rdfs:label": "contactPoint" - }, - { - "@id": "schema:contactPoints", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rangeIncludes": "schema:ContactPoint", - "rdfs:comment": "A contact point for a person or organization (legacy spelling; see singular form, contactPoint).", - "rdfs:label": "contactPoints", - "supercededBy": "schema:contactPoint" - }, - { - "@id": "schema:deathDate", - "@type": "rdf:Property", - "domainIncludes": "schema:Person", - "rangeIncludes": "schema:Date", - "rdfs:comment": "Date of death.", - "rdfs:label": "deathDate" - }, - { - "@id": "schema:duns", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The Dun & Bradstreet DUNS number for identifying an organization or business person.", - "rdfs:label": "duns" - }, - { - "@id": "schema:email", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:ContactPoint", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "Email address.", - "rdfs:label": "email" - }, - { - "@id": "schema:familyName", - "@type": "rdf:Property", - "domainIncludes": "schema:Person", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Family name. In the U.S., the last name of an Person. 
This can be used along with givenName instead of the Name property.", - "rdfs:label": "familyName" - }, - { - "@id": "schema:faxNumber", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place", - "schema:ContactPoint", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The fax number.", - "rdfs:label": "faxNumber" - }, - { - "@id": "schema:follows", - "@type": "rdf:Property", - "domainIncludes": "schema:Person", - "rangeIncludes": "schema:Person", - "rdfs:comment": "The most generic uni-directional social relation.", - "rdfs:label": "follows" - }, - { - "@id": "schema:gender", - "@type": "rdf:Property", - "domainIncludes": "schema:Person", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Gender of the person.", - "rdfs:label": "gender" - }, - { - "@id": "schema:givenName", - "@type": "rdf:Property", - "domainIncludes": "schema:Person", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Given name. In the U.S., the first name of a Person. This can be used along with familyName instead of the Name property.", - "rdfs:label": "givenName" - }, - { - "@id": "schema:globalLocationNumber", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The Global Location Number (GLN, sometimes also referred to as International Location Number or ILN) of the respective organization, person, or place. 
The GLN is a 13-digit number used to identify parties and physical locations.", - "rdfs:label": "globalLocationNumber" - }, - { - "@id": "schema:hasPOS", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rangeIncludes": "schema:Place", - "rdfs:comment": "Points-of-Sales operated by the organization or person.", - "rdfs:label": "hasPOS" - }, - { - "@id": "schema:homeLocation", - "@type": "rdf:Property", - "domainIncludes": "schema:Person", - "rangeIncludes": [ - "schema:ContactPoint", - "schema:Place" - ], - "rdfs:comment": "A contact location for a person's residence.", - "rdfs:label": "homeLocation" - }, - { - "@id": "schema:honorificPrefix", - "@type": "rdf:Property", - "domainIncludes": "schema:Person", - "rangeIncludes": "schema:Text", - "rdfs:comment": "An honorific prefix preceding a Person's name such as Dr/Mrs/Mr.", - "rdfs:label": "honorificPrefix" - }, - { - "@id": "schema:honorificSuffix", - "@type": "rdf:Property", - "domainIncludes": "schema:Person", - "rangeIncludes": "schema:Text", - "rdfs:comment": "An honorific suffix preceding a Person's name such as M.D. /PhD/MSCSW.", - "rdfs:label": "honorificSuffix" - }, - { - "@id": "schema:interactionCount", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Organization", - "schema:Place", - "schema:MediaObject", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "A count of a specific user interactions with this item—for example, 20 UserLikes, 5 UserComments, or 300 UserDownloads. 
The user interaction type should be one of the sub types of UserInteraction.", - "rdfs:label": "interactionCount" - }, - { - "@id": "schema:isicV4", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The International Standard of Industrial Classification of All Economic Activities (ISIC), Revision 4 code for a particular organization, business person, or place.", - "rdfs:label": "isicV4" - }, - { - "@id": "schema:jobTitle", - "@type": "rdf:Property", - "domainIncludes": "schema:Person", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The job title of the person (for example, Financial Manager).", - "rdfs:label": "jobTitle" - }, - { - "@id": "schema:knows", - "@type": "rdf:Property", - "domainIncludes": "schema:Person", - "rangeIncludes": "schema:Person", - "rdfs:comment": "The most generic bi-directional social/work relation.", - "rdfs:label": "knows" - }, - { - "@id": "schema:makesOffer", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rangeIncludes": "schema:Offer", - "rdfs:comment": "A pointer to products or services offered by the organization or person.", - "rdfs:label": "makesOffer" - }, - { - "@id": "schema:memberOf", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Person", - "schema:Organization" - ], - "inverseOf": "schema:member", - "rangeIncludes": [ - "schema:Organization", - "schema:ProgramMembership" - ], - "rdfs:comment": "An Organization (or ProgramMembership) to which this Person or Organization belongs.", - "rdfs:label": "memberOf" - }, - { - "@id": "schema:naics", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The North American Industry Classification System (NAICS) code for a particular organization or business person.", - "rdfs:label": "naics" - }, - { - 
"@id": "schema:nationality", - "@type": "rdf:Property", - "domainIncludes": "schema:Person", - "rangeIncludes": "schema:Country", - "rdfs:comment": "Nationality of the person.", - "rdfs:label": "nationality" - }, - { - "@id": "schema:owns", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rangeIncludes": [ - "schema:OwnershipInfo", - "schema:Product" - ], - "rdfs:comment": "Products owned by the organization or person.", - "rdfs:label": "owns" - }, - { - "@id": "schema:parent", - "@type": "rdf:Property", - "domainIncludes": "schema:Person", - "rangeIncludes": "schema:Person", - "rdfs:comment": "A parent of this person.", - "rdfs:label": "parent" - }, - { - "@id": "schema:parents", - "@type": "rdf:Property", - "domainIncludes": "schema:Person", - "rangeIncludes": "schema:Person", - "rdfs:comment": "A parents of the person (legacy spelling; see singular form, parent).", - "rdfs:label": "parents", - "supercededBy": "schema:parent" - }, - { - "@id": "schema:performerIn", - "@type": "rdf:Property", - "domainIncludes": "schema:Person", - "rangeIncludes": "schema:Event", - "rdfs:comment": "Event that this person is a performer or participant in.", - "rdfs:label": "performerIn" - }, - { - "@id": "schema:relatedTo", - "@type": "rdf:Property", - "domainIncludes": "schema:Person", - "rangeIncludes": "schema:Person", - "rdfs:comment": "The most generic familial relation.", - "rdfs:label": "relatedTo" - }, - { - "@id": "schema:seeks", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rangeIncludes": "schema:Demand", - "rdfs:comment": "A pointer to products or services sought by the organization or person (demand).", - "rdfs:label": "seeks" - }, - { - "@id": "schema:sibling", - "@type": "rdf:Property", - "domainIncludes": "schema:Person", - "rangeIncludes": "schema:Person", - "rdfs:comment": "A sibling of the person.", - "rdfs:label": "sibling" - }, - { - "@id": 
"schema:siblings", - "@type": "rdf:Property", - "domainIncludes": "schema:Person", - "rangeIncludes": "schema:Person", - "rdfs:comment": "A sibling of the person (legacy spelling; see singular form, sibling).", - "rdfs:label": "siblings", - "supercededBy": "schema:sibling" - }, - { - "@id": "schema:spouse", - "@type": "rdf:Property", - "domainIncludes": "schema:Person", - "rangeIncludes": "schema:Person", - "rdfs:comment": "The person's spouse.", - "rdfs:label": "spouse" - }, - { - "@id": "schema:taxID", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The Tax / Fiscal ID of the organization or person, e.g. the TIN in the US or the CIF/NIF in Spain.", - "rdfs:label": "taxID" - }, - { - "@id": "schema:telephone", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place", - "schema:ContactPoint", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The telephone number.", - "rdfs:label": "telephone" - }, - { - "@id": "schema:vatID", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The Value-added Tax ID of the organization or person.", - "rdfs:label": "vatID" - }, - { - "@id": "schema:workLocation", - "@type": "rdf:Property", - "domainIncludes": "schema:Person", - "rangeIncludes": [ - "schema:ContactPoint", - "schema:Place" - ], - "rdfs:comment": "A contact location for a person's place of work.", - "rdfs:label": "workLocation" - }, - { - "@id": "schema:worksFor", - "@type": "rdf:Property", - "domainIncludes": "schema:Person", - "rangeIncludes": "schema:Organization", - "rdfs:comment": "Organizations that the person works for.", - "rdfs:label": "worksFor" - } - ] - }, - { - "@id": "schema:Place", - "@type": "rdfs:Class", - "rdfs:comment": "Entities that have a somewhat fixed, physical extension.", - "rdfs:label": 
"Place", - "rdfs:subClassOf": "schema:Thing", - "children": [ - { - "@id": "schema:AdministrativeArea", - "@type": "rdfs:Class", - "rdfs:comment": "A geographical region under the jurisdiction of a particular government.", - "rdfs:label": "AdministrativeArea", - "rdfs:subClassOf": "schema:Place", - "children": [ - { - "@id": "schema:City", - "@type": "rdfs:Class", - "rdfs:comment": "A city or town.", - "rdfs:label": "City", - "rdfs:subClassOf": "schema:AdministrativeArea" - }, - { - "@id": "schema:Country", - "@type": "rdfs:Class", - "rdfs:comment": "A country.", - "rdfs:label": "Country", - "rdfs:subClassOf": "schema:AdministrativeArea" - }, - { - "@id": "schema:State", - "@type": "rdfs:Class", - "rdfs:comment": "A state or province.", - "rdfs:label": "State", - "rdfs:subClassOf": "schema:AdministrativeArea" - } - ] - }, - { - "@id": "schema:CivicStructure", - "@type": "rdfs:Class", - "rdfs:comment": "A public structure, such as a town hall or concert hall.", - "rdfs:label": "CivicStructure", - "rdfs:subClassOf": "schema:Place", - "children": [ - { - "@id": "schema:Airport", - "@type": "rdfs:Class", - "rdfs:comment": "An airport.", - "rdfs:label": "Airport", - "rdfs:subClassOf": "schema:CivicStructure", - "properties": [ - { - "@id": "schema:iataCode", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Airline", - "schema:Airport" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "IATA identifier for an airline or airport", - "rdfs:label": "iataCode" - }, - { - "@id": "schema:icaoCode", - "@type": "rdf:Property", - "domainIncludes": "schema:Airport", - "rangeIncludes": "schema:Text", - "rdfs:comment": "IACO identifier for an airport.", - "rdfs:label": "iacoCode" - } - ] - }, - { - "@id": "schema:Aquarium", - "@type": "rdfs:Class", - "rdfs:comment": "Aquarium.", - "rdfs:label": "Aquarium", - "rdfs:subClassOf": "schema:CivicStructure" - }, - { - "@id": "schema:Beach", - "@type": "rdfs:Class", - "rdfs:comment": "Beach.", - "rdfs:label": "Beach", - 
"rdfs:subClassOf": "schema:CivicStructure" - }, - { - "@id": "schema:BusStation", - "@type": "rdfs:Class", - "rdfs:comment": "A bus station.", - "rdfs:label": "BusStation", - "rdfs:subClassOf": "schema:CivicStructure" - }, - { - "@id": "schema:BusStop", - "@type": "rdfs:Class", - "rdfs:comment": "A bus stop.", - "rdfs:label": "BusStop", - "rdfs:subClassOf": "schema:CivicStructure" - }, - { - "@id": "schema:Campground", - "@type": "rdfs:Class", - "rdfs:comment": "A campground.", - "rdfs:label": "Campground", - "rdfs:subClassOf": "schema:CivicStructure" - }, - { - "@id": "schema:Cemetery", - "@type": "rdfs:Class", - "rdfs:comment": "A graveyard.", - "rdfs:label": "Cemetery", - "rdfs:subClassOf": "schema:CivicStructure" - }, - { - "@id": "schema:Crematorium", - "@type": "rdfs:Class", - "rdfs:comment": "A crematorium.", - "rdfs:label": "Crematorium", - "rdfs:subClassOf": "schema:CivicStructure" - }, - { - "@id": "schema:EventVenue", - "@type": "rdfs:Class", - "rdfs:comment": "An event venue.", - "rdfs:label": "EventVenue", - "rdfs:subClassOf": "schema:CivicStructure" - }, - { - "@id": "schema:FireStation", - "@type": "rdfs:Class", - "rdfs:comment": "A fire station. 
With firemen.", - "rdfs:label": "FireStation", - "rdfs:subClassOf": [ - "schema:CivicStructure", - "schema:EmergencyService" - ] - }, - { - "@id": "schema:GovernmentBuilding", - "@type": "rdfs:Class", - "rdfs:comment": "A government building.", - "rdfs:label": "GovernmentBuilding", - "rdfs:subClassOf": "schema:CivicStructure", - "children": [ - { - "@id": "schema:CityHall", - "@type": "rdfs:Class", - "rdfs:comment": "A city hall.", - "rdfs:label": "CityHall", - "rdfs:subClassOf": "schema:GovernmentBuilding" - }, - { - "@id": "schema:Courthouse", - "@type": "rdfs:Class", - "rdfs:comment": "A courthouse.", - "rdfs:label": "Courthouse", - "rdfs:subClassOf": "schema:GovernmentBuilding" - }, - { - "@id": "schema:DefenceEstablishment", - "@type": "rdfs:Class", - "rdfs:comment": "A defence establishment, such as an army or navy base.", - "rdfs:label": "DefenceEstablishment", - "rdfs:subClassOf": "schema:GovernmentBuilding" - }, - { - "@id": "schema:Embassy", - "@type": "rdfs:Class", - "rdfs:comment": "An embassy.", - "rdfs:label": "Embassy", - "rdfs:subClassOf": "schema:GovernmentBuilding" - }, - { - "@id": "schema:LegislativeBuilding", - "@type": "rdfs:Class", - "rdfs:comment": "A legislative building—for example, the state capitol.", - "rdfs:label": "LegislativeBuilding", - "rdfs:subClassOf": "schema:GovernmentBuilding" - } - ] - }, - { - "@id": "schema:Hospital", - "@type": "rdfs:Class", - "rdfs:comment": "A hospital.", - "rdfs:label": "Hospital", - "rdfs:subClassOf": [ - "schema:CivicStructure", - "schema:EmergencyService", - "schema:MedicalOrganization" - ], - "properties": [ - { - "@id": "schema:availableService", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Hospital", - "schema:MedicalClinic", - "schema:Physician" - ], - "rangeIncludes": [ - "schema:MedicalProcedure", - "schema:MedicalTest", - "schema:MedicalTherapy" - ], - "rdfs:comment": "A medical service available from this provider.", - "rdfs:label": "availableService" - }, - { - "@id": 
"schema:medicalSpecialty", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Hospital", - "schema:MedicalClinic", - "schema:Physician" - ], - "rangeIncludes": "schema:MedicalSpecialty", - "rdfs:comment": "A medical specialty of the provider.", - "rdfs:label": "medicalSpecialty" - } - ] - }, - { - "@id": "schema:MovieTheater", - "@type": "rdfs:Class", - "rdfs:comment": "A movie theater.", - "rdfs:label": "MovieTheater", - "rdfs:subClassOf": [ - "schema:CivicStructure", - "schema:EntertainmentBusiness" - ] - }, - { - "@id": "schema:Museum", - "@type": "rdfs:Class", - "rdfs:comment": "A museum.", - "rdfs:label": "Museum", - "rdfs:subClassOf": "schema:CivicStructure" - }, - { - "@id": "schema:MusicVenue", - "@type": "rdfs:Class", - "rdfs:comment": "A music venue.", - "rdfs:label": "MusicVenue", - "rdfs:subClassOf": "schema:CivicStructure" - }, - { - "@id": "schema:Park", - "@type": "rdfs:Class", - "rdfs:comment": "A park.", - "rdfs:label": "Park", - "rdfs:subClassOf": "schema:CivicStructure" - }, - { - "@id": "schema:ParkingFacility", - "@type": "rdfs:Class", - "rdfs:comment": "A parking lot or other parking facility.", - "rdfs:label": "ParkingFacility", - "rdfs:subClassOf": "schema:CivicStructure" - }, - { - "@id": "schema:PerformingArtsTheater", - "@type": "rdfs:Class", - "rdfs:comment": "A theater or other performing art center.", - "rdfs:label": "PerformingArtsTheater", - "rdfs:subClassOf": "schema:CivicStructure" - }, - { - "@id": "schema:PlaceOfWorship", - "@type": "rdfs:Class", - "rdfs:comment": "Place of worship, such as a church, synagogue, or mosque.", - "rdfs:label": "PlaceOfWorship", - "rdfs:subClassOf": "schema:CivicStructure", - "children": [ - { - "@id": "schema:BuddhistTemple", - "@type": "rdfs:Class", - "rdfs:comment": "A Buddhist temple.", - "rdfs:label": "BuddhistTemple", - "rdfs:subClassOf": "schema:PlaceOfWorship" - }, - { - "@id": "schema:CatholicChurch", - "@type": "rdfs:Class", - "rdfs:comment": "A Catholic church.", - "rdfs:label": 
"CatholicChurch", - "rdfs:subClassOf": "schema:PlaceOfWorship" - }, - { - "@id": "schema:Church", - "@type": "rdfs:Class", - "rdfs:comment": "A church.", - "rdfs:label": "Church", - "rdfs:subClassOf": "schema:PlaceOfWorship" - }, - { - "@id": "schema:HinduTemple", - "@type": "rdfs:Class", - "rdfs:comment": "A Hindu temple.", - "rdfs:label": "HinduTemple", - "rdfs:subClassOf": "schema:PlaceOfWorship" - }, - { - "@id": "schema:Mosque", - "@type": "rdfs:Class", - "rdfs:comment": "A mosque.", - "rdfs:label": "Mosque", - "rdfs:subClassOf": "schema:PlaceOfWorship" - }, - { - "@id": "schema:Synagogue", - "@type": "rdfs:Class", - "rdfs:comment": "A synagogue.", - "rdfs:label": "Synagogue", - "rdfs:subClassOf": "schema:PlaceOfWorship" - } - ] - }, - { - "@id": "schema:Playground", - "@type": "rdfs:Class", - "rdfs:comment": "A playground.", - "rdfs:label": "Playground", - "rdfs:subClassOf": "schema:CivicStructure" - }, - { - "@id": "schema:PoliceStation", - "@type": "rdfs:Class", - "rdfs:comment": "A police station.", - "rdfs:label": "PoliceStation", - "rdfs:subClassOf": [ - "schema:CivicStructure", - "schema:EmergencyService" - ] - }, - { - "@id": "schema:RVPark", - "@type": "rdfs:Class", - "rdfs:comment": "An RV park.", - "rdfs:label": "RVPark", - "rdfs:subClassOf": "schema:CivicStructure" - }, - { - "@id": "schema:StadiumOrArena", - "@type": "rdfs:Class", - "rdfs:comment": "A stadium.", - "rdfs:label": "StadiumOrArena", - "rdfs:subClassOf": [ - "schema:CivicStructure", - "schema:SportsActivityLocation" - ] - }, - { - "@id": "schema:SubwayStation", - "@type": "rdfs:Class", - "rdfs:comment": "A subway station.", - "rdfs:label": "SubwayStation", - "rdfs:subClassOf": "schema:CivicStructure" - }, - { - "@id": "schema:TaxiStand", - "@type": "rdfs:Class", - "rdfs:comment": "A taxi stand.", - "rdfs:label": "TaxiStand", - "rdfs:subClassOf": "schema:CivicStructure" - }, - { - "@id": "schema:TrainStation", - "@type": "rdfs:Class", - "rdfs:comment": "A train station.", - 
"rdfs:label": "TrainStation", - "rdfs:subClassOf": "schema:CivicStructure" - }, - { - "@id": "schema:Zoo", - "@type": "rdfs:Class", - "rdfs:comment": "A zoo.", - "rdfs:label": "Zoo", - "rdfs:subClassOf": "schema:CivicStructure" - } - ], - "properties": [ - { - "@id": "schema:openingHours", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:LocalBusiness", - "schema:CivicStructure" - ], - "rangeIncludes": "schema:Duration", - "rdfs:comment": "The opening hours for a business. Opening hours can be specified as a weekly time range, starting with days, then times per day. Multiple days can be listed with commas ',' separating each day. Day or time ranges are specified using a hyphen '-'.
- Days are specified using the following two-letter combinations: Mo, Tu, We, Th, Fr, Sa, Su.
- Times are specified using 24:00 time. For example, 3pm is specified as 15:00.
- Here is an example: <time itemprop="openingHours" datetime="Tu,Th 16:00-20:00">Tuesdays and Thursdays 4-8pm</time>.
- If a business is open 7 days a week, then it can be specified as <time itemprop="openingHours" datetime="Mo-Su">Monday through Sunday, all day</time>.", - "rdfs:label": "openingHours" - } - ] - }, - { - "@id": "schema:Landform", - "@type": "rdfs:Class", - "rdfs:comment": "A landform or physical feature. Landform elements include mountains, plains, lakes, rivers, seascape and oceanic waterbody interface features such as bays, peninsulas, seas and so forth, including sub-aqueous terrain features such as submersed mountain ranges, volcanoes, and the great ocean basins.", - "rdfs:label": "Landform", - "rdfs:subClassOf": "schema:Place", - "children": [ - { - "@id": "schema:BodyOfWater", - "@type": "rdfs:Class", - "rdfs:comment": "A body of water, such as a sea, ocean, or lake.", - "rdfs:label": "BodyOfWater", - "rdfs:subClassOf": "schema:Landform", - "children": [ - { - "@id": "schema:Canal", - "@type": "rdfs:Class", - "rdfs:comment": "A canal, like the Panama Canal", - "rdfs:label": "Canal", - "rdfs:subClassOf": "schema:BodyOfWater" - }, - { - "@id": "schema:LakeBodyOfWater", - "@type": "rdfs:Class", - "rdfs:comment": "A lake (for example, Lake Pontrachain).", - "rdfs:label": "LakeBodyOfWater", - "rdfs:subClassOf": "schema:BodyOfWater" - }, - { - "@id": "schema:OceanBodyOfWater", - "@type": "rdfs:Class", - "rdfs:comment": "An ocean (for example, the Pacific).", - "rdfs:label": "OceanBodyOfWater", - "rdfs:subClassOf": "schema:BodyOfWater" - }, - { - "@id": "schema:Pond", - "@type": "rdfs:Class", - "rdfs:comment": "A pond", - "rdfs:label": "Pond", - "rdfs:subClassOf": "schema:BodyOfWater" - }, - { - "@id": "schema:Reservoir", - "@type": "rdfs:Class", - "rdfs:comment": "A reservoir, like the Lake Kariba reservoir.", - "rdfs:label": "Reservoir", - "rdfs:subClassOf": "schema:BodyOfWater" - }, - { - "@id": "schema:RiverBodyOfWater", - "@type": "rdfs:Class", - "rdfs:comment": "A river (for example, the broad majestic Shannon).", - "rdfs:label": "RiverBodyOfWater", - 
"rdfs:subClassOf": "schema:BodyOfWater" - }, - { - "@id": "schema:SeaBodyOfWater", - "@type": "rdfs:Class", - "rdfs:comment": "A sea (for example, the Caspian sea).", - "rdfs:label": "SeaBodyOfWater", - "rdfs:subClassOf": "schema:BodyOfWater" - }, - { - "@id": "schema:Waterfall", - "@type": "rdfs:Class", - "rdfs:comment": "A waterfall, like Niagara", - "rdfs:label": "Waterfall", - "rdfs:subClassOf": "schema:BodyOfWater" - } - ] - }, - { - "@id": "schema:Continent", - "@type": "rdfs:Class", - "rdfs:comment": "One of the continents (for example, Europe or Africa).", - "rdfs:label": "Continent", - "rdfs:subClassOf": "schema:Landform" - }, - { - "@id": "schema:Mountain", - "@type": "rdfs:Class", - "rdfs:comment": "A mountain, like Mount Whitney or Mount Everest", - "rdfs:label": "Mountain", - "rdfs:subClassOf": "schema:Landform" - }, - { - "@id": "schema:Volcano", - "@type": "rdfs:Class", - "rdfs:comment": "A volcano, like Fuji san.", - "rdfs:label": "Volcano", - "rdfs:subClassOf": "schema:Landform" - } - ] - }, - { - "@id": "schema:LandmarksOrHistoricalBuildings", - "@type": "rdfs:Class", - "rdfs:comment": "An historical landmark or building.", - "rdfs:label": "LandmarksOrHistoricalBuildings", - "rdfs:subClassOf": "schema:Place" - }, - { - "@id": "schema:LocalBusiness", - "@type": "rdfs:Class", - "rdfs:comment": "A particular physical business or branch of an organization. 
Examples of LocalBusiness include a restaurant, a particular branch of a restaurant chain, a branch of a bank, a medical practice, a club, a bowling alley, etc.", - "rdfs:label": "LocalBusiness", - "rdfs:subClassOf": [ - "schema:Organization", - "schema:Place" - ], - "children": [ - { - "@id": "schema:AnimalShelter", - "@type": "rdfs:Class", - "rdfs:comment": "Animal shelter.", - "rdfs:label": "AnimalShelter", - "rdfs:subClassOf": "schema:LocalBusiness" - }, - { - "@id": "schema:AutomotiveBusiness", - "@type": "rdfs:Class", - "rdfs:comment": "Car repair, sales, or parts.", - "rdfs:label": "AutomotiveBusiness", - "rdfs:subClassOf": "schema:LocalBusiness", - "children": [ - { - "@id": "schema:AutoBodyShop", - "@type": "rdfs:Class", - "rdfs:comment": "Auto body shop.", - "rdfs:label": "AutoBodyShop", - "rdfs:subClassOf": "schema:AutomotiveBusiness" - }, - { - "@id": "schema:AutoDealer", - "@type": "rdfs:Class", - "rdfs:comment": "An car dealership.", - "rdfs:label": "AutoDealer", - "rdfs:subClassOf": "schema:AutomotiveBusiness" - }, - { - "@id": "schema:AutoPartsStore", - "@type": "rdfs:Class", - "rdfs:comment": "An auto parts store.", - "rdfs:label": "AutoPartsStore", - "rdfs:subClassOf": [ - "schema:AutomotiveBusiness", - "schema:Store" - ] - }, - { - "@id": "schema:AutoRental", - "@type": "rdfs:Class", - "rdfs:comment": "A car rental business.", - "rdfs:label": "AutoRental", - "rdfs:subClassOf": "schema:AutomotiveBusiness" - }, - { - "@id": "schema:AutoRepair", - "@type": "rdfs:Class", - "rdfs:comment": "Car repair business.", - "rdfs:label": "AutoRepair", - "rdfs:subClassOf": "schema:AutomotiveBusiness" - }, - { - "@id": "schema:AutoWash", - "@type": "rdfs:Class", - "rdfs:comment": "A car wash business.", - "rdfs:label": "AutoWash", - "rdfs:subClassOf": "schema:AutomotiveBusiness" - }, - { - "@id": "schema:GasStation", - "@type": "rdfs:Class", - "rdfs:comment": "A gas station.", - "rdfs:label": "GasStation", - "rdfs:subClassOf": "schema:AutomotiveBusiness" - }, - 
{ - "@id": "schema:MotorcycleDealer", - "@type": "rdfs:Class", - "rdfs:comment": "A motorcycle dealer.", - "rdfs:label": "MotorcycleDealer", - "rdfs:subClassOf": "schema:AutomotiveBusiness" - }, - { - "@id": "schema:MotorcycleRepair", - "@type": "rdfs:Class", - "rdfs:comment": "A motorcycle repair shop.", - "rdfs:label": "MotorcycleRepair", - "rdfs:subClassOf": "schema:AutomotiveBusiness" - } - ] - }, - { - "@id": "schema:ChildCare", - "@type": "rdfs:Class", - "rdfs:comment": "A Childcare center.", - "rdfs:label": "ChildCare", - "rdfs:subClassOf": "schema:LocalBusiness" - }, - { - "@id": "schema:DryCleaningOrLaundry", - "@type": "rdfs:Class", - "rdfs:comment": "A dry-cleaning business.", - "rdfs:label": "DryCleaningOrLaundry", - "rdfs:subClassOf": "schema:LocalBusiness" - }, - { - "@id": "schema:EmergencyService", - "@type": "rdfs:Class", - "rdfs:comment": "An emergency service, such as a fire station or ER.", - "rdfs:label": "EmergencyService", - "rdfs:subClassOf": "schema:LocalBusiness", - "children": [ - { - "@id": "schema:FireStation", - "@type": "rdfs:Class", - "rdfs:comment": "A fire station. 
With firemen.", - "rdfs:label": "FireStation", - "rdfs:subClassOf": [ - "schema:CivicStructure", - "schema:EmergencyService" - ] - }, - { - "@id": "schema:Hospital", - "@type": "rdfs:Class", - "rdfs:comment": "A hospital.", - "rdfs:label": "Hospital", - "rdfs:subClassOf": [ - "schema:CivicStructure", - "schema:EmergencyService", - "schema:MedicalOrganization" - ], - "properties": [ - { - "@id": "schema:availableService", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Hospital", - "schema:MedicalClinic", - "schema:Physician" - ], - "rangeIncludes": [ - "schema:MedicalProcedure", - "schema:MedicalTest", - "schema:MedicalTherapy" - ], - "rdfs:comment": "A medical service available from this provider.", - "rdfs:label": "availableService" - }, - { - "@id": "schema:medicalSpecialty", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Hospital", - "schema:MedicalClinic", - "schema:Physician" - ], - "rangeIncludes": "schema:MedicalSpecialty", - "rdfs:comment": "A medical specialty of the provider.", - "rdfs:label": "medicalSpecialty" - } - ] - }, - { - "@id": "schema:PoliceStation", - "@type": "rdfs:Class", - "rdfs:comment": "A police station.", - "rdfs:label": "PoliceStation", - "rdfs:subClassOf": [ - "schema:CivicStructure", - "schema:EmergencyService" - ] - } - ] - }, - { - "@id": "schema:EmploymentAgency", - "@type": "rdfs:Class", - "rdfs:comment": "An employment agency.", - "rdfs:label": "EmploymentAgency", - "rdfs:subClassOf": "schema:LocalBusiness" - }, - { - "@id": "schema:EntertainmentBusiness", - "@type": "rdfs:Class", - "rdfs:comment": "A business providing entertainment.", - "rdfs:label": "EntertainmentBusiness", - "rdfs:subClassOf": "schema:LocalBusiness", - "children": [ - { - "@id": "schema:AdultEntertainment", - "@type": "rdfs:Class", - "rdfs:comment": "An adult entertainment establishment.", - "rdfs:label": "AdultEntertainment", - "rdfs:subClassOf": "schema:EntertainmentBusiness" - }, - { - "@id": "schema:AmusementPark", - "@type": 
"rdfs:Class", - "rdfs:comment": "An amusement park.", - "rdfs:label": "AmusementPark", - "rdfs:subClassOf": "schema:EntertainmentBusiness" - }, - { - "@id": "schema:ArtGallery", - "@type": "rdfs:Class", - "rdfs:comment": "An art gallery.", - "rdfs:label": "ArtGallery", - "rdfs:subClassOf": "schema:EntertainmentBusiness" - }, - { - "@id": "schema:Casino", - "@type": "rdfs:Class", - "rdfs:comment": "A casino.", - "rdfs:label": "Casino", - "rdfs:subClassOf": "schema:EntertainmentBusiness" - }, - { - "@id": "schema:ComedyClub", - "@type": "rdfs:Class", - "rdfs:comment": "A comedy club.", - "rdfs:label": "ComedyClub", - "rdfs:subClassOf": "schema:EntertainmentBusiness" - }, - { - "@id": "schema:MovieTheater", - "@type": "rdfs:Class", - "rdfs:comment": "A movie theater.", - "rdfs:label": "MovieTheater", - "rdfs:subClassOf": [ - "schema:CivicStructure", - "schema:EntertainmentBusiness" - ] - }, - { - "@id": "schema:NightClub", - "@type": "rdfs:Class", - "rdfs:comment": "A nightclub or discotheque.", - "rdfs:label": "NightClub", - "rdfs:subClassOf": "schema:EntertainmentBusiness" - } - ] - }, - { - "@id": "schema:FinancialService", - "@type": "rdfs:Class", - "rdfs:comment": "Financial services business.", - "rdfs:label": "FinancialService", - "rdfs:subClassOf": "schema:LocalBusiness", - "children": [ - { - "@id": "schema:AccountingService", - "@type": "rdfs:Class", - "rdfs:comment": "Accountancy business.", - "rdfs:label": "AccountingService", - "rdfs:subClassOf": [ - "schema:FinancialService", - "schema:ProfessionalService" - ] - }, - { - "@id": "schema:AutomatedTeller", - "@type": "rdfs:Class", - "rdfs:comment": "ATM/cash machine.", - "rdfs:label": "AutomatedTeller", - "rdfs:subClassOf": "schema:FinancialService" - }, - { - "@id": "schema:BankOrCreditUnion", - "@type": "rdfs:Class", - "rdfs:comment": "Bank or credit union.", - "rdfs:label": "BankOrCreditUnion", - "rdfs:subClassOf": "schema:FinancialService" - }, - { - "@id": "schema:InsuranceAgency", - "@type": 
"rdfs:Class", - "rdfs:comment": "Insurance agency.", - "rdfs:label": "InsuranceAgency", - "rdfs:subClassOf": "schema:FinancialService" - } - ] - }, - { - "@id": "schema:FoodEstablishment", - "@type": "rdfs:Class", - "rdfs:comment": "A food-related business.", - "rdfs:label": "FoodEstablishment", - "rdfs:subClassOf": "schema:LocalBusiness", - "children": [ - { - "@id": "schema:Bakery", - "@type": "rdfs:Class", - "rdfs:comment": "A bakery.", - "rdfs:label": "Bakery", - "rdfs:subClassOf": "schema:FoodEstablishment" - }, - { - "@id": "schema:BarOrPub", - "@type": "rdfs:Class", - "rdfs:comment": "A bar or pub.", - "rdfs:label": "BarOrPub", - "rdfs:subClassOf": "schema:FoodEstablishment" - }, - { - "@id": "schema:Brewery", - "@type": "rdfs:Class", - "rdfs:comment": "Brewery.", - "rdfs:label": "Brewery", - "rdfs:subClassOf": "schema:FoodEstablishment" - }, - { - "@id": "schema:CafeOrCoffeeShop", - "@type": "rdfs:Class", - "rdfs:comment": "A cafe or coffee shop.", - "rdfs:label": "CafeOrCoffeeShop", - "rdfs:subClassOf": "schema:FoodEstablishment" - }, - { - "@id": "schema:FastFoodRestaurant", - "@type": "rdfs:Class", - "rdfs:comment": "A fast-food restaurant.", - "rdfs:label": "FastFoodRestaurant", - "rdfs:subClassOf": "schema:FoodEstablishment" - }, - { - "@id": "schema:IceCreamShop", - "@type": "rdfs:Class", - "rdfs:comment": "An ice cream shop", - "rdfs:label": "IceCreamShop", - "rdfs:subClassOf": "schema:FoodEstablishment" - }, - { - "@id": "schema:Restaurant", - "@type": "rdfs:Class", - "rdfs:comment": "A restaurant.", - "rdfs:label": "Restaurant", - "rdfs:subClassOf": "schema:FoodEstablishment" - }, - { - "@id": "schema:Winery", - "@type": "rdfs:Class", - "rdfs:comment": "A winery.", - "rdfs:label": "Winery", - "rdfs:subClassOf": "schema:FoodEstablishment" - } - ], - "properties": [ - { - "@id": "schema:acceptsReservations", - "@type": "rdf:Property", - "domainIncludes": "schema:FoodEstablishment", - "rangeIncludes": [ - "schema:Text", - "schema:URL", - 
"schema:Boolean" - ], - "rdfs:comment": "Indicates whether a FoodEstablishment accepts reservations. Values can be Boolean, an URL at which reservations can be made or (for backwards compatibility) the strings Yes or No.", - "rdfs:label": "acceptsReservations" - }, - { - "@id": "schema:menu", - "@type": "rdf:Property", - "domainIncludes": "schema:FoodEstablishment", - "rangeIncludes": [ - "schema:Text", - "schema:URL" - ], - "rdfs:comment": "Either the actual menu or a URL of the menu.", - "rdfs:label": "menu" - }, - { - "@id": "schema:servesCuisine", - "@type": "rdf:Property", - "domainIncludes": "schema:FoodEstablishment", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The cuisine of the restaurant.", - "rdfs:label": "servesCuisine" - } - ] - }, - { - "@id": "schema:GovernmentOffice", - "@type": "rdfs:Class", - "rdfs:comment": "A government office—for example, an IRS or DMV office.", - "rdfs:label": "GovernmentOffice", - "rdfs:subClassOf": "schema:LocalBusiness", - "children": [ - { - "@id": "schema:PostOffice", - "@type": "rdfs:Class", - "rdfs:comment": "A post office.", - "rdfs:label": "PostOffice", - "rdfs:subClassOf": "schema:GovernmentOffice" - } - ] - }, - { - "@id": "schema:HealthAndBeautyBusiness", - "@type": "rdfs:Class", - "rdfs:comment": "Health and beauty.", - "rdfs:label": "HealthAndBeautyBusiness", - "rdfs:subClassOf": "schema:LocalBusiness", - "children": [ - { - "@id": "schema:BeautySalon", - "@type": "rdfs:Class", - "rdfs:comment": "Beauty salon.", - "rdfs:label": "BeautySalon", - "rdfs:subClassOf": "schema:HealthAndBeautyBusiness" - }, - { - "@id": "schema:DaySpa", - "@type": "rdfs:Class", - "rdfs:comment": "A day spa.", - "rdfs:label": "DaySpa", - "rdfs:subClassOf": "schema:HealthAndBeautyBusiness" - }, - { - "@id": "schema:HairSalon", - "@type": "rdfs:Class", - "rdfs:comment": "A hair salon.", - "rdfs:label": "HairSalon", - "rdfs:subClassOf": "schema:HealthAndBeautyBusiness" - }, - { - "@id": "schema:HealthClub", - "@type": "rdfs:Class", 
- "rdfs:comment": "A health club.", - "rdfs:label": "HealthClub", - "rdfs:subClassOf": [ - "schema:HealthAndBeautyBusiness", - "schema:SportsActivityLocation" - ] - }, - { - "@id": "schema:NailSalon", - "@type": "rdfs:Class", - "rdfs:comment": "A nail salon.", - "rdfs:label": "NailSalon", - "rdfs:subClassOf": "schema:HealthAndBeautyBusiness" - }, - { - "@id": "schema:TattooParlor", - "@type": "rdfs:Class", - "rdfs:comment": "A tattoo parlor.", - "rdfs:label": "TattooParlor", - "rdfs:subClassOf": "schema:HealthAndBeautyBusiness" - } - ] - }, - { - "@id": "schema:HomeAndConstructionBusiness", - "@type": "rdfs:Class", - "rdfs:comment": "A construction business.", - "rdfs:label": "HomeAndConstructionBusiness", - "rdfs:subClassOf": "schema:LocalBusiness", - "children": [ - { - "@id": "schema:Electrician", - "@type": "rdfs:Class", - "rdfs:comment": "An electrician.", - "rdfs:label": "Electrician", - "rdfs:subClassOf": [ - "schema:HomeAndConstructionBusiness", - "schema:ProfessionalService" - ] - }, - { - "@id": "schema:GeneralContractor", - "@type": "rdfs:Class", - "rdfs:comment": "A general contractor.", - "rdfs:label": "GeneralContractor", - "rdfs:subClassOf": [ - "schema:HomeAndConstructionBusiness", - "schema:ProfessionalService" - ] - }, - { - "@id": "schema:HVACBusiness", - "@type": "rdfs:Class", - "rdfs:comment": "An HVAC service.", - "rdfs:label": "HVACBusiness", - "rdfs:subClassOf": "schema:HomeAndConstructionBusiness" - }, - { - "@id": "schema:HousePainter", - "@type": "rdfs:Class", - "rdfs:comment": "A house painting service.", - "rdfs:label": "HousePainter", - "rdfs:subClassOf": [ - "schema:HomeAndConstructionBusiness", - "schema:ProfessionalService" - ] - }, - { - "@id": "schema:Locksmith", - "@type": "rdfs:Class", - "rdfs:comment": "A locksmith.", - "rdfs:label": "Locksmith", - "rdfs:subClassOf": [ - "schema:HomeAndConstructionBusiness", - "schema:ProfessionalService" - ] - }, - { - "@id": "schema:MovingCompany", - "@type": "rdfs:Class", - "rdfs:comment": 
"A moving company.", - "rdfs:label": "MovingCompany", - "rdfs:subClassOf": "schema:HomeAndConstructionBusiness" - }, - { - "@id": "schema:Plumber", - "@type": "rdfs:Class", - "rdfs:comment": "A plumbing service.", - "rdfs:label": "Plumber", - "rdfs:subClassOf": [ - "schema:HomeAndConstructionBusiness", - "schema:ProfessionalService" - ] - }, - { - "@id": "schema:RoofingContractor", - "@type": "rdfs:Class", - "rdfs:comment": "A roofing contractor.", - "rdfs:label": "RoofingContractor", - "rdfs:subClassOf": [ - "schema:HomeAndConstructionBusiness", - "schema:ProfessionalService" - ] - } - ] - }, - { - "@id": "schema:InternetCafe", - "@type": "rdfs:Class", - "rdfs:comment": "An internet cafe.", - "rdfs:label": "InternetCafe", - "rdfs:subClassOf": "schema:LocalBusiness" - }, - { - "@id": "schema:Library", - "@type": "rdfs:Class", - "rdfs:comment": "A library.", - "rdfs:label": "Library", - "rdfs:subClassOf": "schema:LocalBusiness" - }, - { - "@id": "schema:LodgingBusiness", - "@type": "rdfs:Class", - "rdfs:comment": "A lodging business, such as a motel, hotel, or inn.", - "rdfs:label": "LodgingBusiness", - "rdfs:subClassOf": "schema:LocalBusiness", - "children": [ - { - "@id": "schema:BedAndBreakfast", - "@type": "rdfs:Class", - "rdfs:comment": "Bed and breakfast.", - "rdfs:label": "BedAndBreakfast", - "rdfs:subClassOf": "schema:LodgingBusiness" - }, - { - "@id": "schema:Hostel", - "@type": "rdfs:Class", - "rdfs:comment": "A hostel.", - "rdfs:label": "Hostel", - "rdfs:subClassOf": "schema:LodgingBusiness" - }, - { - "@id": "schema:Hotel", - "@type": "rdfs:Class", - "rdfs:comment": "A hotel.", - "rdfs:label": "Hotel", - "rdfs:subClassOf": "schema:LodgingBusiness" - }, - { - "@id": "schema:Motel", - "@type": "rdfs:Class", - "rdfs:comment": "A motel.", - "rdfs:label": "Motel", - "rdfs:subClassOf": "schema:LodgingBusiness" - } - ] - }, - { - "@id": "schema:MedicalOrganization", - "@type": "rdfs:Class", - "rdfs:comment": "A medical organization, such as a doctor's office or 
clinic.", - "rdfs:label": "MedicalOrganization", - "rdfs:subClassOf": "schema:LocalBusiness", - "children": [ - { - "@id": "schema:Dentist", - "@type": "rdfs:Class", - "rdfs:comment": "A dentist.", - "rdfs:label": "Dentist", - "rdfs:subClassOf": [ - "schema:MedicalOrganization", - "schema:ProfessionalService" - ] - }, - { - "@id": "schema:DiagnosticLab", - "@type": "rdfs:Class", - "rdfs:comment": "A medical laboratory that offers on-site or off-site diagnostic services.", - "rdfs:label": "DiagnosticLab", - "rdfs:subClassOf": "schema:MedicalOrganization", - "properties": [ - { - "@id": "schema:availableTest", - "@type": "rdf:Property", - "domainIncludes": "schema:DiagnosticLab", - "rangeIncludes": "schema:MedicalTest", - "rdfs:comment": "A diagnostic test or procedure offered by this lab.", - "rdfs:label": "availableTest" - } - ] - }, - { - "@id": "schema:Hospital", - "@type": "rdfs:Class", - "rdfs:comment": "A hospital.", - "rdfs:label": "Hospital", - "rdfs:subClassOf": [ - "schema:CivicStructure", - "schema:EmergencyService", - "schema:MedicalOrganization" - ], - "properties": [ - { - "@id": "schema:availableService", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Hospital", - "schema:MedicalClinic", - "schema:Physician" - ], - "rangeIncludes": [ - "schema:MedicalProcedure", - "schema:MedicalTest", - "schema:MedicalTherapy" - ], - "rdfs:comment": "A medical service available from this provider.", - "rdfs:label": "availableService" - }, - { - "@id": "schema:medicalSpecialty", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Hospital", - "schema:MedicalClinic", - "schema:Physician" - ], - "rangeIncludes": "schema:MedicalSpecialty", - "rdfs:comment": "A medical specialty of the provider.", - "rdfs:label": "medicalSpecialty" - } - ] - }, - { - "@id": "schema:MedicalClinic", - "@type": "rdfs:Class", - "rdfs:comment": "A medical clinic.", - "rdfs:label": "MedicalClinic", - "rdfs:subClassOf": "schema:MedicalOrganization", - "properties": [ - { - 
"@id": "schema:availableService", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Hospital", - "schema:MedicalClinic", - "schema:Physician" - ], - "rangeIncludes": [ - "schema:MedicalProcedure", - "schema:MedicalTest", - "schema:MedicalTherapy" - ], - "rdfs:comment": "A medical service available from this provider.", - "rdfs:label": "availableService" - }, - { - "@id": "schema:medicalSpecialty", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Hospital", - "schema:MedicalClinic", - "schema:Physician" - ], - "rangeIncludes": "schema:MedicalSpecialty", - "rdfs:comment": "A medical specialty of the provider.", - "rdfs:label": "medicalSpecialty" - } - ] - }, - { - "@id": "schema:Optician", - "@type": "rdfs:Class", - "rdfs:comment": "An optician's store.", - "rdfs:label": "Optician", - "rdfs:subClassOf": "schema:MedicalOrganization" - }, - { - "@id": "schema:Pharmacy", - "@type": "rdfs:Class", - "rdfs:comment": "A pharmacy or drugstore.", - "rdfs:label": "Pharmacy", - "rdfs:subClassOf": "schema:MedicalOrganization" - }, - { - "@id": "schema:Physician", - "@type": "rdfs:Class", - "rdfs:comment": "A doctor's office.", - "rdfs:label": "Physician", - "rdfs:subClassOf": "schema:MedicalOrganization", - "properties": [ - { - "@id": "schema:availableService", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Hospital", - "schema:MedicalClinic", - "schema:Physician" - ], - "rangeIncludes": [ - "schema:MedicalProcedure", - "schema:MedicalTest", - "schema:MedicalTherapy" - ], - "rdfs:comment": "A medical service available from this provider.", - "rdfs:label": "availableService" - }, - { - "@id": "schema:hospitalAffiliation", - "@type": "rdf:Property", - "domainIncludes": "schema:Physician", - "rangeIncludes": "schema:Hospital", - "rdfs:comment": "A hospital with which the physician or office is affiliated.", - "rdfs:label": "hospitalAffiliation" - }, - { - "@id": "schema:medicalSpecialty", - "@type": "rdf:Property", - "domainIncludes": [ - 
"schema:Hospital", - "schema:MedicalClinic", - "schema:Physician" - ], - "rangeIncludes": "schema:MedicalSpecialty", - "rdfs:comment": "A medical specialty of the provider.", - "rdfs:label": "medicalSpecialty" - } - ] - }, - { - "@id": "schema:VeterinaryCare", - "@type": "rdfs:Class", - "rdfs:comment": "A vet's office.", - "rdfs:label": "VeterinaryCare", - "rdfs:subClassOf": "schema:MedicalOrganization" - } - ] - }, - { - "@id": "schema:ProfessionalService", - "@type": "rdfs:Class", - "rdfs:comment": "Provider of professional services.", - "rdfs:label": "ProfessionalService", - "rdfs:subClassOf": "schema:LocalBusiness", - "children": [ - { - "@id": "schema:AccountingService", - "@type": "rdfs:Class", - "rdfs:comment": "Accountancy business.", - "rdfs:label": "AccountingService", - "rdfs:subClassOf": [ - "schema:FinancialService", - "schema:ProfessionalService" - ] - }, - { - "@id": "schema:Attorney", - "@type": "rdfs:Class", - "rdfs:comment": "Professional service: Attorney.", - "rdfs:label": "Attorney", - "rdfs:subClassOf": "schema:ProfessionalService" - }, - { - "@id": "schema:Dentist", - "@type": "rdfs:Class", - "rdfs:comment": "A dentist.", - "rdfs:label": "Dentist", - "rdfs:subClassOf": [ - "schema:MedicalOrganization", - "schema:ProfessionalService" - ] - }, - { - "@id": "schema:Electrician", - "@type": "rdfs:Class", - "rdfs:comment": "An electrician.", - "rdfs:label": "Electrician", - "rdfs:subClassOf": [ - "schema:HomeAndConstructionBusiness", - "schema:ProfessionalService" - ] - }, - { - "@id": "schema:GeneralContractor", - "@type": "rdfs:Class", - "rdfs:comment": "A general contractor.", - "rdfs:label": "GeneralContractor", - "rdfs:subClassOf": [ - "schema:HomeAndConstructionBusiness", - "schema:ProfessionalService" - ] - }, - { - "@id": "schema:HousePainter", - "@type": "rdfs:Class", - "rdfs:comment": "A house painting service.", - "rdfs:label": "HousePainter", - "rdfs:subClassOf": [ - "schema:HomeAndConstructionBusiness", - "schema:ProfessionalService" 
- ] - }, - { - "@id": "schema:Locksmith", - "@type": "rdfs:Class", - "rdfs:comment": "A locksmith.", - "rdfs:label": "Locksmith", - "rdfs:subClassOf": [ - "schema:HomeAndConstructionBusiness", - "schema:ProfessionalService" - ] - }, - { - "@id": "schema:Notary", - "@type": "rdfs:Class", - "rdfs:comment": "A notary.", - "rdfs:label": "Notary", - "rdfs:subClassOf": "schema:ProfessionalService" - }, - { - "@id": "schema:Plumber", - "@type": "rdfs:Class", - "rdfs:comment": "A plumbing service.", - "rdfs:label": "Plumber", - "rdfs:subClassOf": [ - "schema:HomeAndConstructionBusiness", - "schema:ProfessionalService" - ] - }, - { - "@id": "schema:RoofingContractor", - "@type": "rdfs:Class", - "rdfs:comment": "A roofing contractor.", - "rdfs:label": "RoofingContractor", - "rdfs:subClassOf": [ - "schema:HomeAndConstructionBusiness", - "schema:ProfessionalService" - ] - } - ] - }, - { - "@id": "schema:RadioStation", - "@type": "rdfs:Class", - "rdfs:comment": "A radio station.", - "rdfs:label": "RadioStation", - "rdfs:subClassOf": "schema:LocalBusiness" - }, - { - "@id": "schema:RealEstateAgent", - "@type": "rdfs:Class", - "rdfs:comment": "A real-estate agent.", - "rdfs:label": "RealEstateAgent", - "rdfs:subClassOf": "schema:LocalBusiness" - }, - { - "@id": "schema:RecyclingCenter", - "@type": "rdfs:Class", - "rdfs:comment": "A recycling center.", - "rdfs:label": "RecyclingCenter", - "rdfs:subClassOf": "schema:LocalBusiness" - }, - { - "@id": "schema:SelfStorage", - "@type": "rdfs:Class", - "rdfs:comment": "Self-storage facility.", - "rdfs:label": "SelfStorage", - "rdfs:subClassOf": "schema:LocalBusiness" - }, - { - "@id": "schema:ShoppingCenter", - "@type": "rdfs:Class", - "rdfs:comment": "A shopping center or mall.", - "rdfs:label": "ShoppingCenter", - "rdfs:subClassOf": "schema:LocalBusiness" - }, - { - "@id": "schema:SportsActivityLocation", - "@type": "rdfs:Class", - "rdfs:comment": "A sports location, such as a playing field.", - "rdfs:label": "SportsActivityLocation", 
- "rdfs:subClassOf": "schema:LocalBusiness", - "children": [ - { - "@id": "schema:BowlingAlley", - "@type": "rdfs:Class", - "rdfs:comment": "A bowling alley.", - "rdfs:label": "BowlingAlley", - "rdfs:subClassOf": "schema:SportsActivityLocation" - }, - { - "@id": "schema:ExerciseGym", - "@type": "rdfs:Class", - "rdfs:comment": "A gym.", - "rdfs:label": "ExerciseGym", - "rdfs:subClassOf": "schema:SportsActivityLocation" - }, - { - "@id": "schema:GolfCourse", - "@type": "rdfs:Class", - "rdfs:comment": "A golf course.", - "rdfs:label": "GolfCourse", - "rdfs:subClassOf": "schema:SportsActivityLocation" - }, - { - "@id": "schema:HealthClub", - "@type": "rdfs:Class", - "rdfs:comment": "A health club.", - "rdfs:label": "HealthClub", - "rdfs:subClassOf": [ - "schema:HealthAndBeautyBusiness", - "schema:SportsActivityLocation" - ] - }, - { - "@id": "schema:PublicSwimmingPool", - "@type": "rdfs:Class", - "rdfs:comment": "A public swimming pool.", - "rdfs:label": "PublicSwimmingPool", - "rdfs:subClassOf": "schema:SportsActivityLocation" - }, - { - "@id": "schema:SkiResort", - "@type": "rdfs:Class", - "rdfs:comment": "A ski resort.", - "rdfs:label": "SkiResort", - "rdfs:subClassOf": "schema:SportsActivityLocation" - }, - { - "@id": "schema:SportsClub", - "@type": "rdfs:Class", - "rdfs:comment": "A sports club.", - "rdfs:label": "SportsClub", - "rdfs:subClassOf": "schema:SportsActivityLocation" - }, - { - "@id": "schema:StadiumOrArena", - "@type": "rdfs:Class", - "rdfs:comment": "A stadium.", - "rdfs:label": "StadiumOrArena", - "rdfs:subClassOf": [ - "schema:CivicStructure", - "schema:SportsActivityLocation" - ] - }, - { - "@id": "schema:TennisComplex", - "@type": "rdfs:Class", - "rdfs:comment": "A tennis complex.", - "rdfs:label": "TennisComplex", - "rdfs:subClassOf": "schema:SportsActivityLocation" - } - ] - }, - { - "@id": "schema:Store", - "@type": "rdfs:Class", - "rdfs:comment": "A retail good store.", - "rdfs:label": "Store", - "rdfs:subClassOf": "schema:LocalBusiness", - 
"children": [ - { - "@id": "schema:AutoPartsStore", - "@type": "rdfs:Class", - "rdfs:comment": "An auto parts store.", - "rdfs:label": "AutoPartsStore", - "rdfs:subClassOf": [ - "schema:AutomotiveBusiness", - "schema:Store" - ] - }, - { - "@id": "schema:BikeStore", - "@type": "rdfs:Class", - "rdfs:comment": "A bike store.", - "rdfs:label": "BikeStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:BookStore", - "@type": "rdfs:Class", - "rdfs:comment": "A bookstore.", - "rdfs:label": "BookStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:ClothingStore", - "@type": "rdfs:Class", - "rdfs:comment": "A clothing store.", - "rdfs:label": "ClothingStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:ComputerStore", - "@type": "rdfs:Class", - "rdfs:comment": "A computer store.", - "rdfs:label": "ComputerStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:ConvenienceStore", - "@type": "rdfs:Class", - "rdfs:comment": "A convenience store.", - "rdfs:label": "ConvenienceStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:DepartmentStore", - "@type": "rdfs:Class", - "rdfs:comment": "A department store.", - "rdfs:label": "DepartmentStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:ElectronicsStore", - "@type": "rdfs:Class", - "rdfs:comment": "An electronics store.", - "rdfs:label": "ElectronicsStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:Florist", - "@type": "rdfs:Class", - "rdfs:comment": "A florist.", - "rdfs:label": "Florist", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:FurnitureStore", - "@type": "rdfs:Class", - "rdfs:comment": "A furniture store.", - "rdfs:label": "FurnitureStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:GardenStore", - "@type": "rdfs:Class", - "rdfs:comment": "A garden store.", - "rdfs:label": "GardenStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:GroceryStore", - 
"@type": "rdfs:Class", - "rdfs:comment": "A grocery store.", - "rdfs:label": "GroceryStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:HardwareStore", - "@type": "rdfs:Class", - "rdfs:comment": "A hardware store.", - "rdfs:label": "HardwareStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:HobbyShop", - "@type": "rdfs:Class", - "rdfs:comment": "A hobby store.", - "rdfs:label": "HobbyShop", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:HomeGoodsStore", - "@type": "rdfs:Class", - "rdfs:comment": "A home goods store.", - "rdfs:label": "HomeGoodsStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:JewelryStore", - "@type": "rdfs:Class", - "rdfs:comment": "A jewelry store.", - "rdfs:label": "JewelryStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:LiquorStore", - "@type": "rdfs:Class", - "rdfs:comment": "A liquor store.", - "rdfs:label": "LiquorStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:MensClothingStore", - "@type": "rdfs:Class", - "rdfs:comment": "A men's clothing store.", - "rdfs:label": "MensClothingStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:MobilePhoneStore", - "@type": "rdfs:Class", - "rdfs:comment": "A mobile-phone store.", - "rdfs:label": "MobilePhoneStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:MovieRentalStore", - "@type": "rdfs:Class", - "rdfs:comment": "A movie rental store.", - "rdfs:label": "MovieRentalStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:MusicStore", - "@type": "rdfs:Class", - "rdfs:comment": "A music store.", - "rdfs:label": "MusicStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:OfficeEquipmentStore", - "@type": "rdfs:Class", - "rdfs:comment": "An office equipment store.", - "rdfs:label": "OfficeEquipmentStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:OutletStore", - "@type": "rdfs:Class", - "rdfs:comment": "An 
outlet store.", - "rdfs:label": "OutletStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:PawnShop", - "@type": "rdfs:Class", - "rdfs:comment": "A pawn store.", - "rdfs:label": "PawnShop", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:PetStore", - "@type": "rdfs:Class", - "rdfs:comment": "A pet store.", - "rdfs:label": "PetStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:ShoeStore", - "@type": "rdfs:Class", - "rdfs:comment": "A shoe store.", - "rdfs:label": "ShoeStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:SportingGoodsStore", - "@type": "rdfs:Class", - "rdfs:comment": "A sporting goods store.", - "rdfs:label": "SportingGoodsStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:TireShop", - "@type": "rdfs:Class", - "rdfs:comment": "A tire shop.", - "rdfs:label": "TireShop", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:ToyStore", - "@type": "rdfs:Class", - "rdfs:comment": "A toy store.", - "rdfs:label": "ToyStore", - "rdfs:subClassOf": "schema:Store" - }, - { - "@id": "schema:WholesaleStore", - "@type": "rdfs:Class", - "rdfs:comment": "A wholesale store.", - "rdfs:label": "WholesaleStore", - "rdfs:subClassOf": "schema:Store" - } - ] - }, - { - "@id": "schema:TelevisionStation", - "@type": "rdfs:Class", - "rdfs:comment": "A television station.", - "rdfs:label": "TelevisionStation", - "rdfs:subClassOf": "schema:LocalBusiness" - }, - { - "@id": "schema:TouristInformationCenter", - "@type": "rdfs:Class", - "rdfs:comment": "A tourist information center.", - "rdfs:label": "TouristInformationCenter", - "rdfs:subClassOf": "schema:LocalBusiness" - }, - { - "@id": "schema:TravelAgency", - "@type": "rdfs:Class", - "rdfs:comment": "A travel agency.", - "rdfs:label": "TravelAgency", - "rdfs:subClassOf": "schema:LocalBusiness" - } - ], - "properties": [ - { - "@id": "schema:branchOf", - "@type": "rdf:Property", - "domainIncludes": "schema:LocalBusiness", - 
"rangeIncludes": "schema:Organization", - "rdfs:comment": "The larger organization that this local business is a branch of, if any.", - "rdfs:label": "branchOf" - }, - { - "@id": "schema:currenciesAccepted", - "@type": "rdf:Property", - "domainIncludes": "schema:LocalBusiness", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The currency accepted (in ISO 4217 currency format).", - "rdfs:label": "currenciesAccepted" - }, - { - "@id": "schema:openingHours", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:LocalBusiness", - "schema:CivicStructure" - ], - "rangeIncludes": "schema:Duration", - "rdfs:comment": "The opening hours for a business. Opening hours can be specified as a weekly time range, starting with days, then times per day. Multiple days can be listed with commas ',' separating each day. Day or time ranges are specified using a hyphen '-'.
- Days are specified using the following two-letter combinations: Mo, Tu, We, Th, Fr, Sa, Su.
- Times are specified using 24:00 time. For example, 3pm is specified as 15:00.
- Here is an example: <time itemprop="openingHours" datetime="Tu,Th 16:00-20:00">Tuesdays and Thursdays 4-8pm</time>.
- If a business is open 7 days a week, then it can be specified as <time itemprop="openingHours" datetime="Mo-Su">Monday through Sunday, all day</time>.", - "rdfs:label": "openingHours" - }, - { - "@id": "schema:paymentAccepted", - "@type": "rdf:Property", - "domainIncludes": "schema:LocalBusiness", - "rangeIncludes": "schema:Text", - "rdfs:comment": "Cash, credit card, etc.", - "rdfs:label": "paymentAccepted" - }, - { - "@id": "schema:priceRange", - "@type": "rdf:Property", - "domainIncludes": "schema:LocalBusiness", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The price range of the business, for example $$$.", - "rdfs:label": "priceRange" - } - ] - }, - { - "@id": "schema:Residence", - "@type": "rdfs:Class", - "rdfs:comment": "The place where a person lives.", - "rdfs:label": "Residence", - "rdfs:subClassOf": "schema:Place", - "children": [ - { - "@id": "schema:ApartmentComplex", - "@type": "rdfs:Class", - "rdfs:comment": "Residence type: Apartment complex.", - "rdfs:label": "ApartmentComplex", - "rdfs:subClassOf": "schema:Residence" - }, - { - "@id": "schema:GatedResidenceCommunity", - "@type": "rdfs:Class", - "rdfs:comment": "Residence type: Gated community.", - "rdfs:label": "GatedResidenceCommunity", - "rdfs:subClassOf": "schema:Residence" - }, - { - "@id": "schema:SingleFamilyResidence", - "@type": "rdfs:Class", - "rdfs:comment": "Residence type: Single-family home.", - "rdfs:label": "SingleFamilyResidence", - "rdfs:subClassOf": "schema:Residence" - } - ] - }, - { - "@id": "schema:TouristAttraction", - "@type": "rdfs:Class", - "rdfs:comment": "A tourist attraction.", - "rdfs:label": "TouristAttraction", - "rdfs:subClassOf": "schema:Place" - } - ], - "properties": [ - { - "@id": "schema:address", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place", - "schema:Person" - ], - "rangeIncludes": "schema:PostalAddress", - "rdfs:comment": "Physical address of the item.", - "rdfs:label": "address" - }, - { - "@id": 
"schema:aggregateRating", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Organization", - "schema:Place", - "schema:Offer", - "schema:Product" - ], - "rangeIncludes": "schema:AggregateRating", - "rdfs:comment": "The overall rating, based on a collection of reviews or ratings, of the item.", - "rdfs:label": "aggregateRating" - }, - { - "@id": "schema:containedIn", - "@type": "rdf:Property", - "domainIncludes": "schema:Place", - "rangeIncludes": "schema:Place", - "rdfs:comment": "The basic containment relation between places.", - "rdfs:label": "containedIn" - }, - { - "@id": "schema:event", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place", - "schema:InformAction", - "schema:PlayAction", - "schema:InviteAction", - "schema:JoinAction", - "schema:LeaveAction" - ], - "rangeIncludes": "schema:Event", - "rdfs:comment": "Upcoming or past event associated with this place or organization.", - "rdfs:label": "event" - }, - { - "@id": "schema:events", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place" - ], - "rangeIncludes": "schema:Event", - "rdfs:comment": "Upcoming or past events associated with this place or organization (legacy spelling; see singular form, event).", - "rdfs:label": "events", - "supercededBy": "schema:event" - }, - { - "@id": "schema:faxNumber", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place", - "schema:ContactPoint", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The fax number.", - "rdfs:label": "faxNumber" - }, - { - "@id": "schema:geo", - "@type": "rdf:Property", - "domainIncludes": "schema:Place", - "rangeIncludes": [ - "schema:GeoCoordinates", - "schema:GeoShape" - ], - "rdfs:comment": "The geo coordinates of the place.", - "rdfs:label": "geo" - }, - { - "@id": "schema:globalLocationNumber", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", 
- "schema:Place", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The Global Location Number (GLN, sometimes also referred to as International Location Number or ILN) of the respective organization, person, or place. The GLN is a 13-digit number used to identify parties and physical locations.", - "rdfs:label": "globalLocationNumber" - }, - { - "@id": "schema:interactionCount", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Organization", - "schema:Place", - "schema:MediaObject", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "A count of a specific user interactions with this item—for example, 20 UserLikes, 5 UserComments, or 300 UserDownloads. The user interaction type should be one of the sub types of UserInteraction.", - "rdfs:label": "interactionCount" - }, - { - "@id": "schema:isicV4", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The International Standard of Industrial Classification of All Economic Activities (ISIC), Revision 4 code for a particular organization, business person, or place.", - "rdfs:label": "isicV4" - }, - { - "@id": "schema:logo", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place", - "schema:Product", - "schema:Brand" - ], - "rangeIncludes": [ - "schema:ImageObject", - "schema:URL" - ], - "rdfs:comment": "A logo associated with an organization.", - "rdfs:label": "logo" - }, - { - "@id": "schema:map", - "@type": "rdf:Property", - "domainIncludes": "schema:Place", - "rangeIncludes": "schema:URL", - "rdfs:comment": "A URL to a map of the place.", - "rdfs:label": "map" - }, - { - "@id": "schema:maps", - "@type": "rdf:Property", - "domainIncludes": "schema:Place", - "rangeIncludes": "schema:URL", - "rdfs:comment": "A URL to a map of the place (legacy spelling; see singular form, map).", - 
"rdfs:label": "maps", - "supercededBy": "schema:map" - }, - { - "@id": "schema:openingHoursSpecification", - "@type": "rdf:Property", - "domainIncludes": "schema:Place", - "rangeIncludes": "schema:OpeningHoursSpecification", - "rdfs:comment": "The opening hours of a certain place.", - "rdfs:label": "openingHoursSpecification" - }, - { - "@id": "schema:photo", - "@type": "rdf:Property", - "domainIncludes": "schema:Place", - "rangeIncludes": [ - "schema:ImageObject", - "schema:Photograph" - ], - "rdfs:comment": "A photograph of this place.", - "rdfs:label": "photo" - }, - { - "@id": "schema:photos", - "@type": "rdf:Property", - "domainIncludes": "schema:Place", - "rangeIncludes": [ - "schema:ImageObject", - "schema:Photograph" - ], - "rdfs:comment": "Photographs of this place (legacy spelling; see singular form, photo).", - "rdfs:label": "photos", - "supercededBy": "schema:photo" - }, - { - "@id": "schema:review", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Organization", - "schema:Place", - "schema:Offer", - "schema:Product" - ], - "rangeIncludes": "schema:Review", - "rdfs:comment": "A review of the item.", - "rdfs:label": "review" - }, - { - "@id": "schema:reviews", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Organization", - "schema:Place", - "schema:Offer", - "schema:Product" - ], - "rangeIncludes": "schema:Review", - "rdfs:comment": "Review of the item (legacy spelling; see singular form, review).", - "rdfs:label": "reviews", - "supercededBy": "schema:review" - }, - { - "@id": "schema:telephone", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place", - "schema:ContactPoint", - "schema:Person" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The telephone number.", - "rdfs:label": "telephone" - } - ] - }, - { - "@id": "schema:Product", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": 
"http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_GoodRelationsProperties" - }, - "rdfs:comment": "Any offered product or service. For example: a pair of shoes; a concert ticket; the rental of a car; a haircut; or an episode of a TV show streamed online.", - "rdfs:label": "Product", - "rdfs:subClassOf": "schema:Thing", - "children": [ - { - "@id": "schema:IndividualProduct", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_GoodRelationsClass" - }, - "rdfs:comment": "A single, identifiable product instance (e.g. a laptop with a particular serial number).", - "rdfs:label": "IndividualProduct", - "rdfs:subClassOf": "schema:Product", - "properties": [ - { - "@id": "schema:serialNumber", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand", - "schema:IndividualProduct" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The serial number or any alphanumeric identifier of a particular product. When attached to an offer, it is a shortcut for the serial number of the product included in the offer.", - "rdfs:label": "serialNumber" - } - ] - }, - { - "@id": "schema:ProductModel", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_GoodRelationsClass" - }, - "rdfs:comment": "A datasheet or vendor specification of a product (in the sense of a prototypical description).", - "rdfs:label": "ProductModel", - "rdfs:subClassOf": "schema:Product", - "properties": [ - { - "@id": "schema:isVariantOf", - "@type": "rdf:Property", - "domainIncludes": "schema:ProductModel", - "rangeIncludes": "schema:ProductModel", - "rdfs:comment": "A pointer to a base product from which this product is a variant. It is safe to infer that the variant inherits all product features from the base model, unless defined locally. 
This is not transitive.", - "rdfs:label": "isVariantOf" - }, - { - "@id": "schema:predecessorOf", - "@type": "rdf:Property", - "domainIncludes": "schema:ProductModel", - "rangeIncludes": "schema:ProductModel", - "rdfs:comment": "A pointer from a previous, often discontinued variant of the product to its newer variant.", - "rdfs:label": "predecessorOf" - }, - { - "@id": "schema:successorOf", - "@type": "rdf:Property", - "domainIncludes": "schema:ProductModel", - "rangeIncludes": "schema:ProductModel", - "rdfs:comment": "A pointer from a newer variant of a product to its previous, often discontinued predecessor.", - "rdfs:label": "successorOf" - } - ] - }, - { - "@id": "schema:SomeProducts", - "@type": "rdfs:Class", - "http://purl.org/dc/terms/source": { - "@id": "http://www.w3.org/wiki/WebSchemas/SchemaDotOrgSources#source_GoodRelationsClass" - }, - "rdfs:comment": "A placeholder for multiple similar products of the same kind.", - "rdfs:label": "SomeProducts", - "rdfs:subClassOf": "schema:Product", - "properties": [ - { - "@id": "schema:inventoryLevel", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Demand", - "schema:SomeProducts" - ], - "rangeIncludes": "schema:QuantitativeValue", - "rdfs:comment": "The current approximate inventory level for the item or items.", - "rdfs:label": "inventoryLevel" - } - ] - }, - { - "@id": "schema:Vehicle", - "@type": "rdfs:Class", - "rdfs:comment": "A vehicle.", - "rdfs:label": "Vehicle", - "rdfs:subClassOf": "schema:Product", - "children": [ - { - "@id": "schema:Car", - "@type": "rdfs:Class", - "rdfs:comment": "An automobile.", - "rdfs:label": "Car", - "rdfs:subClassOf": "schema:Vehicle" - } - ] - } - ], - "properties": [ - { - "@id": "schema:aggregateRating", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Organization", - "schema:Place", - "schema:Offer", - "schema:Product" - ], - "rangeIncludes": "schema:AggregateRating", - "rdfs:comment": "The overall rating, 
based on a collection of reviews or ratings, of the item.", - "rdfs:label": "aggregateRating" - }, - { - "@id": "schema:audience", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Product", - "schema:PlayAction" - ], - "rangeIncludes": "schema:Audience", - "rdfs:comment": "The intended audience of the item, i.e. the group for whom the item was created.", - "rdfs:label": "audience" - }, - { - "@id": "schema:brand", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Person", - "schema:Product" - ], - "rangeIncludes": [ - "schema:Brand", - "schema:Organization" - ], - "rdfs:comment": "The brand(s) associated with a product or service, or the brand(s) maintained by an organization or business person.", - "rdfs:label": "brand" - }, - { - "@id": "schema:color", - "@type": "rdf:Property", - "domainIncludes": "schema:Product", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The color of the product.", - "rdfs:label": "color" - }, - { - "@id": "schema:depth", - "@type": "rdf:Property", - "domainIncludes": "schema:Product", - "rangeIncludes": [ - "schema:Distance", - "schema:QuantitativeValue" - ], - "rdfs:comment": "The depth of the product.", - "rdfs:label": "depth" - }, - { - "@id": "schema:gtin13", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Product", - "schema:Demand" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The GTIN-13 code of the product, or the product to which the offer refers. This is equivalent to 13-digit ISBN codes and EAN UCC-13. 
Former 12-digit UPC codes can be converted into a GTIN-13 code by simply adding a preceeding zero.", - "rdfs:label": "gtin13" - }, - { - "@id": "schema:gtin14", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Product", - "schema:Demand" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The GTIN-14 code of the product, or the product to which the offer refers.", - "rdfs:label": "gtin14" - }, - { - "@id": "schema:gtin8", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Product", - "schema:Demand" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The GTIN-8 code of the product, or the product to which the offer refers. This code is also known as EAN/UCC-8 or 8-digit EAN.", - "rdfs:label": "gtin8" - }, - { - "@id": "schema:height", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MediaObject", - "schema:Product" - ], - "rangeIncludes": [ - "schema:Distance", - "schema:QuantitativeValue" - ], - "rdfs:comment": "The height of the item.", - "rdfs:label": "height" - }, - { - "@id": "schema:isAccessoryOrSparePartFor", - "@type": "rdf:Property", - "domainIncludes": "schema:Product", - "rangeIncludes": "schema:Product", - "rdfs:comment": "A pointer to another product (or multiple products) for which this product is an accessory or spare part.", - "rdfs:label": "isAccessoryOrSparePartFor" - }, - { - "@id": "schema:isConsumableFor", - "@type": "rdf:Property", - "domainIncludes": "schema:Product", - "rangeIncludes": "schema:Product", - "rdfs:comment": "A pointer to another product (or multiple products) for which this product is a consumable.", - "rdfs:label": "isConsumableFor" - }, - { - "@id": "schema:isRelatedTo", - "@type": "rdf:Property", - "domainIncludes": "schema:Product", - "rangeIncludes": "schema:Product", - "rdfs:comment": "A pointer to another, somehow related product (or multiple products).", - "rdfs:label": "isRelatedTo" - }, - { - "@id": "schema:isSimilarTo", - "@type": 
"rdf:Property", - "domainIncludes": "schema:Product", - "rangeIncludes": "schema:Product", - "rdfs:comment": "A pointer to another, functionally similar product (or multiple products).", - "rdfs:label": "isSimilarTo" - }, - { - "@id": "schema:itemCondition", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Product", - "schema:Demand" - ], - "rangeIncludes": "schema:OfferItemCondition", - "rdfs:comment": "A predefined value from OfferItemCondition or a textual description of the condition of the product or service, or the products or services included in the offer.", - "rdfs:label": "itemCondition" - }, - { - "@id": "schema:logo", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Organization", - "schema:Place", - "schema:Product", - "schema:Brand" - ], - "rangeIncludes": [ - "schema:ImageObject", - "schema:URL" - ], - "rdfs:comment": "A logo associated with an organization.", - "rdfs:label": "logo" - }, - { - "@id": "schema:manufacturer", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:DietarySupplement", - "schema:Drug", - "schema:Product" - ], - "rangeIncludes": "schema:Organization", - "rdfs:comment": "The manufacturer of the product.", - "rdfs:label": "manufacturer" - }, - { - "@id": "schema:model", - "@type": "rdf:Property", - "domainIncludes": "schema:Product", - "rangeIncludes": [ - "schema:ProductModel", - "schema:Text" - ], - "rdfs:comment": "The model of the product. Use with the URL of a ProductModel or a textual representation of the model identifier. The URL of the ProductModel can be from an external source. 
It is recommended to additionally provide strong product identifiers via the gtin8/gtin13/gtin14 and mpn properties.", - "rdfs:label": "model" - }, - { - "@id": "schema:mpn", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Product", - "schema:Demand" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The Manufacturer Part Number (MPN) of the product, or the product to which the offer refers.", - "rdfs:label": "mpn" - }, - { - "@id": "schema:offers", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:MediaObject", - "schema:Event", - "schema:Product" - ], - "rangeIncludes": "schema:Offer", - "rdfs:comment": "An offer to provide this item—for example, an offer to sell a product, rent the DVD of a movie, or give away tickets to an event.", - "rdfs:label": "offers" - }, - { - "@id": "schema:productID", - "@type": "rdf:Property", - "domainIncludes": "schema:Product", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The product identifier, such as ISBN. For example: <meta itemprop='productID' content='isbn:123-456-789'/>.", - "rdfs:label": "productID" - }, - { - "@id": "schema:releaseDate", - "@type": "rdf:Property", - "domainIncludes": "schema:Product", - "rangeIncludes": "schema:Date", - "rdfs:comment": "The release date of a product or product model. 
This can be used to distinguish the exact variant of a product.", - "rdfs:label": "releaseDate" - }, - { - "@id": "schema:review", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Organization", - "schema:Place", - "schema:Offer", - "schema:Product" - ], - "rangeIncludes": "schema:Review", - "rdfs:comment": "A review of the item.", - "rdfs:label": "review" - }, - { - "@id": "schema:reviews", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:CreativeWork", - "schema:Organization", - "schema:Place", - "schema:Offer", - "schema:Product" - ], - "rangeIncludes": "schema:Review", - "rdfs:comment": "Review of the item (legacy spelling; see singular form, review).", - "rdfs:label": "reviews", - "supercededBy": "schema:review" - }, - { - "@id": "schema:sku", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Offer", - "schema:Product", - "schema:Demand" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "The Stock Keeping Unit (SKU), i.e. a merchant-specific identifier for a product or service, or the product to which the offer refers.", - "rdfs:label": "sku" - }, - { - "@id": "schema:weight", - "@type": "rdf:Property", - "domainIncludes": "schema:Product", - "rangeIncludes": "schema:QuantitativeValue", - "rdfs:comment": "The weight of the product.", - "rdfs:label": "weight" - }, - { - "@id": "schema:width", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:MediaObject", - "schema:Product" - ], - "rangeIncludes": [ - "schema:Distance", - "schema:QuantitativeValue" - ], - "rdfs:comment": "The width of the item.", - "rdfs:label": "width" - } - ] - } - ], - "properties": [ - { - "@id": "schema:additionalType", - "@type": "rdf:Property", - "domainIncludes": "schema:Thing", - "rangeIncludes": "schema:URL", - "rdfs:comment": "An additional type for the item, typically used for adding more specific types from external vocabularies in microdata syntax. 
This is a relationship between something and a class that the thing is in. In RDFa syntax, it is better to use the native RDFa syntax - the 'typeof' attribute - for multiple types. Schema.org tools may have only weaker understanding of extra types, in particular those defined externally.", - "rdfs:label": "additionalType" - }, - { - "@id": "schema:alternateName", - "@type": "rdf:Property", - "domainIncludes": [ - "schema:Thing", - "schema:MedicalEntity" - ], - "rangeIncludes": "schema:Text", - "rdfs:comment": "An alias for the item.", - "rdfs:label": "alternateName" - }, - { - "@id": "schema:description", - "@type": "rdf:Property", - "domainIncludes": "schema:Thing", - "rangeIncludes": "schema:Text", - "rdfs:comment": "A short description of the item.", - "rdfs:label": "description" - }, - { - "@id": "schema:image", - "@type": "rdf:Property", - "domainIncludes": "schema:Thing", - "rangeIncludes": "schema:URL", - "rdfs:comment": "URL of an image of the item.", - "rdfs:label": "image" - }, - { - "@id": "schema:name", - "@type": "rdf:Property", - "domainIncludes": "schema:Thing", - "rangeIncludes": "schema:Text", - "rdfs:comment": "The name of the item.", - "rdfs:label": "name" - }, - { - "@id": "schema:potentialAction", - "@type": "rdf:Property", - "domainIncludes": "schema:Thing", - "rangeIncludes": "schema:Action", - "rdfs:comment": "Indicates a potential Action, which describes an idealized action in which this thing would play an 'object' role.", - "rdfs:label": "potentialAction" - }, - { - "@id": "schema:sameAs", - "@type": "rdf:Property", - "domainIncludes": "schema:Thing", - "rangeIncludes": "schema:URL", - "rdfs:comment": "URL of a reference Web page that unambiguously indicates the item's identity. E.g. 
the URL of the item's Wikipedia page, Freebase page, or official website.", - "rdfs:label": "sameAs" - }, - { - "@id": "schema:url", - "@type": "rdf:Property", - "domainIncludes": "schema:Thing", - "rangeIncludes": "schema:URL", - "rdfs:comment": "URL of the item.", - "rdfs:label": "url" - } - ] - }, - { - "@id": "schema:DataType", - "@type": "rdfs:Class", - "rdfs:comment": "The basic data types such as Integers, Strings, etc.", - "rdfs:label": "DataType", - "children": [ - { - "@id": "schema:Boolean", - "@type": "rdfs:Class", - "rdfs:comment": "Boolean: True or False.", - "rdfs:label": "Boolean", - "rdfs:subClassOf": "schema:DataType" - }, - { - "@id": "schema:Date", - "@type": "rdfs:Class", - "rdfs:comment": "A date value in ISO 8601 date format.", - "rdfs:label": "Date", - "rdfs:subClassOf": "schema:DataType" - }, - { - "@id": "schema:DateTime", - "@type": "rdfs:Class", - "rdfs:comment": "A combination of date and time of day in the form [-]CCYY-MM-DDThh:mm:ss[Z|(+|-)hh:mm] (see Chapter 5.4 of ISO 8601).", - "rdfs:label": "DateTime", - "rdfs:subClassOf": "schema:DataType" - }, - { - "@id": "schema:Number", - "@type": "rdfs:Class", - "rdfs:comment": "Data type: Number.", - "rdfs:label": "Number", - "rdfs:subClassOf": "schema:DataType", - "children": [ - { - "@id": "schema:Float", - "@type": "rdfs:Class", - "rdfs:comment": "Data type: Floating number.", - "rdfs:label": "Float", - "rdfs:subClassOf": "schema:Number" - }, - { - "@id": "schema:Integer", - "@type": "rdfs:Class", - "rdfs:comment": "Data type: Integer.", - "rdfs:label": "Integer", - "rdfs:subClassOf": "schema:Number" - } - ] - }, - { - "@id": "schema:Text", - "@type": "rdfs:Class", - "rdfs:comment": "Data type: Text.", - "rdfs:label": "Text", - "rdfs:subClassOf": "schema:DataType", - "children": [ - { - "@id": "schema:URL", - "@type": "rdfs:Class", - "rdfs:comment": "Data type: URL.", - "rdfs:label": "URL", - "rdfs:subClassOf": "schema:Text" - } - ] - }, - { - "@id": "schema:Time", - "@type": 
"rdfs:Class", - "rdfs:comment": "A point in time recurring on multiple days in the form hh:mm:ss[Z|(+|-)hh:mm] (see XML schema for details).", - "rdfs:label": "Time", - "rdfs:subClassOf": "schema:DataType" - } - ] - } - ] -} diff --git a/json-ld.gemspec b/json-ld.gemspec index 2e92c469..cf6f869a 100755 --- a/json-ld.gemspec +++ b/json-ld.gemspec @@ -1,47 +1,44 @@ #!/usr/bin/env ruby -rubygems -# -*- encoding: utf-8 -*- +# frozen_string_literal: true is_java = RUBY_PLATFORM == 'java' Gem::Specification.new do |gem| gem.version = File.read('VERSION').chomp - gem.date = File.mtime('VERSION').strftime('%Y-%m-%d') gem.name = "json-ld" gem.homepage = "https://github.com/ruby-rdf/json-ld" gem.license = 'Unlicense' gem.summary = "JSON-LD reader/writer for Ruby." gem.description = "JSON::LD parses and serializes JSON-LD into RDF and implements expansion, compaction and framing API interfaces for the Ruby RDF.rb library suite." - gem.metadata = { + gem.metadata = { "documentation_uri" => "https://ruby-rdf.github.io/json-ld", - "bug_tracker_uri" => "https://github.com/ruby-rdf/json-ld/issues", - "homepage_uri" => "https://github.com/ruby-rdf/json-ld", - "mailing_list_uri" => "https://lists.w3.org/Archives/Public/public-rdf-ruby/", - "source_code_uri" => "https://github.com/ruby-rdf/json-ld", + "bug_tracker_uri" => "https://github.com/ruby-rdf/json-ld/issues", + "homepage_uri" => "https://github.com/ruby-rdf/json-ld", + "mailing_list_uri" => "https://lists.w3.org/Archives/Public/public-rdf-ruby/", + "source_code_uri" => "https://github.com/ruby-rdf/json-ld", + 'rubygems_mfa_required' => 'true' } gem.authors = ['Gregg Kellogg'] gem.email = 'public-linked-json@w3.org' gem.platform = Gem::Platform::RUBY - gem.files = %w(AUTHORS README.md UNLICENSE VERSION) + Dir.glob('lib/**/*.rb') - gem.bindir = %q(bin) - gem.executables = %w(jsonld) - gem.require_paths = %w(lib) - gem.test_files = Dir.glob('spec/**/*.rb') + Dir.glob('spec/test-files/*') + gem.files = %w[AUTHORS README.md 
UNLICENSE VERSION] + Dir.glob('lib/**/*.rb') + gem.bindir = 'bin' + gem.executables = %w[jsonld] + gem.require_paths = %w[lib] gem.required_ruby_version = '>= 2.6' gem.requirements = [] - gem.add_runtime_dependency 'rdf', '~> 3.2', '>= 3.2.10' + gem.add_runtime_dependency 'htmlentities', '~> 4.3' + gem.add_runtime_dependency 'json-canonicalization', '~> 0.3', '>= 0.3.2' + gem.add_runtime_dependency 'link_header', '~> 0.0', '>= 0.0.8' gem.add_runtime_dependency 'multi_json', '~> 1.15' - gem.add_runtime_dependency 'link_header', '~> 0.0', '>= 0.0.8' - gem.add_runtime_dependency 'json-canonicalization', '~> 0.3' - gem.add_runtime_dependency 'htmlentities', '~> 4.3' gem.add_runtime_dependency "rack", '>= 2.2', '< 4' - gem.add_development_dependency 'sinatra-linkeddata','~> 3.2' + gem.add_runtime_dependency 'rdf', '~> 3.2', '>= 3.2.10' gem.add_development_dependency 'jsonlint', '~> 0.4' unless is_java - gem.add_development_dependency 'oj', '~> 3.14' unless is_java - gem.add_development_dependency 'yajl-ruby', '~> 1.4' unless is_java + gem.add_development_dependency 'oj', '~> 3.14' unless is_java gem.add_development_dependency 'rack-test', '>= 1.1', '< 3' gem.add_development_dependency 'rdf-isomorphic', '~> 3.2' gem.add_development_dependency 'rdf-spec', '~> 3.2' @@ -51,7 +48,9 @@ Gem::Specification.new do |gem| gem.add_development_dependency 'rdf-xsd', '~> 3.2' gem.add_development_dependency 'rspec', '~> 3.12' gem.add_development_dependency 'rspec-its', '~> 1.3' - gem.add_development_dependency 'yard' , '~> 0.9' + gem.add_development_dependency 'sinatra-linkeddata', '~> 3.2' + gem.add_development_dependency 'yajl-ruby', '~> 1.4' unless is_java + gem.add_development_dependency 'yard', '~> 0.9' - gem.post_install_message = nil + gem.post_install_message = nil end diff --git a/lib/json/ld.rb b/lib/json/ld.rb index 3b7caf00..52c3b501 100644 --- a/lib/json/ld.rb +++ b/lib/json/ld.rb @@ -1,6 +1,6 @@ -# -*- encoding: utf-8 -*- # frozen_string_literal: true 
-$:.unshift(File.expand_path("../ld", __FILE__)) + +$LOAD_PATH.unshift(File.expand_path('ld', __dir__)) require 'rdf' # @see https://rubygems.org/gems/rdf require 'multi_json' require 'set' @@ -40,47 +40,47 @@ module LD autoload :Writer, 'json/ld/writer' # JSON-LD profiles - JSON_LD_NS = "http://www.w3.org/ns/json-ld#" - PROFILES = %w(expanded compacted flattened framed).map {|p| JSON_LD_NS + p}.freeze + JSON_LD_NS = 'http://www.w3.org/ns/json-ld#' + PROFILES = %w[expanded compacted flattened framed].map { |p| JSON_LD_NS + p }.freeze # Default context when compacting without one being specified - DEFAULT_CONTEXT = "http://schema.org" + DEFAULT_CONTEXT = 'http://schema.org' # Acceptable MultiJson adapters - MUTLI_JSON_ADAPTERS = %i(oj json_gem json_pure ok_json yajl nsjsonseerialization) + MUTLI_JSON_ADAPTERS = %i[oj json_gem json_pure ok_json yajl nsjsonseerialization] - KEYWORDS = Set.new(%w( - @annotation - @base - @container - @context - @default - @direction - @embed - @explicit - @first - @graph - @id - @import - @included - @index - @json - @language - @list - @nest - @none - @omitDefault - @propagate - @protected - @preserve - @requireAll - @reverse - @set - @type - @value - @version - @vocab - )).freeze + KEYWORDS = Set.new(%w[ + @annotation + @base + @container + @context + @default + @direction + @embed + @explicit + @first + @graph + @id + @import + @included + @index + @json + @language + @list + @nest + @none + @omitDefault + @propagate + @protected + @preserve + @requireAll + @reverse + @set + @type + @value + @version + @vocab + ]).freeze # Regexp matching an NCName. 
NC_REGEXP = Regexp.new( @@ -90,87 +90,94 @@ module LD ( [a-zA-Z_] | \\\\u[0-9a-fA-F] ) - ( [0-9a-zA-Z_\.-] + ( [0-9a-zA-Z_.-] | \\\\u([0-9a-fA-F]{4}) )* $}, - Regexp::EXTENDED) + Regexp::EXTENDED + ) # Datatypes that are expressed in a native form and don't expand or compact NATIVE_DATATYPES = [RDF::XSD.integer.to_s, RDF::XSD.boolean.to_s, RDF::XSD.double.to_s] JSON_STATE = JSON::State.new( - indent: " ", - space: " ", - space_before: "", + indent: ' ', + space: ' ', + space_before: '', object_nl: "\n", array_nl: "\n" ) MAX_CONTEXTS_LOADED = 50 + # URI Constants + RDF_JSON = RDF::URI("#{RDF.to_uri}JSON") + RDF_DIRECTION = RDF::URI("#{RDF.to_uri}direction") + RDF_LANGUAGE = RDF::URI("#{RDF.to_uri}language") + class JsonLdError < StandardError def to_s "#{self.class.instance_variable_get :@code}: #{super}" end + def code self.class.instance_variable_get :@code end - class CollidingKeywords < JsonLdError; @code = "colliding keywords"; end - class ConflictingIndexes < JsonLdError; @code = "conflicting indexes"; end - class CyclicIRIMapping < JsonLdError; @code = "cyclic IRI mapping"; end - class InvalidAnnotation < JsonLdError; @code = "invalid annotation"; end - class InvalidBaseIRI < JsonLdError; @code = "invalid base IRI"; end - class InvalidContainerMapping < JsonLdError; @code = "invalid container mapping"; end - class InvalidContextEntry < JsonLdError; @code = "invalid context entry"; end - class InvalidContextNullification < JsonLdError; @code = "invalid context nullification"; end - class InvalidDefaultLanguage < JsonLdError; @code = "invalid default language"; end - class InvalidIdValue < JsonLdError; @code = "invalid @id value"; end - class InvalidIndexValue < JsonLdError; @code = "invalid @index value"; end - class InvalidVersionValue < JsonLdError; @code = "invalid @version value"; end - class InvalidImportValue < JsonLdError; @code = "invalid @import value"; end - class InvalidIncludedValue < JsonLdError; @code = "invalid @included value"; end - class 
InvalidIRIMapping < JsonLdError; @code = "invalid IRI mapping"; end - class InvalidJsonLiteral < JsonLdError; @code = "invalid JSON literal"; end - class InvalidKeywordAlias < JsonLdError; @code = "invalid keyword alias"; end - class InvalidLanguageMapping < JsonLdError; @code = "invalid language mapping"; end - class InvalidLanguageMapValue < JsonLdError; @code = "invalid language map value"; end - class InvalidLanguageTaggedString < JsonLdError; @code = "invalid language-tagged string"; end - class InvalidLanguageTaggedValue < JsonLdError; @code = "invalid language-tagged value"; end - class InvalidLocalContext < JsonLdError; @code = "invalid local context"; end - class InvalidNestValue < JsonLdError; @code = "invalid @nest value"; end - class InvalidPrefixValue < JsonLdError; @code = "invalid @prefix value"; end - class InvalidPropagateValue < JsonLdError; @code = "invalid @propagate value"; end - class InvalidEmbeddedNode < JsonLdError; @code = "invalid embedded node"; end - class InvalidRemoteContext < JsonLdError; @code = "invalid remote context"; end - class InvalidReverseProperty < JsonLdError; @code = "invalid reverse property"; end - class InvalidReversePropertyMap < JsonLdError; @code = "invalid reverse property map"; end - class InvalidReversePropertyValue < JsonLdError; @code = "invalid reverse property value"; end - class InvalidReverseValue < JsonLdError; @code = "invalid @reverse value"; end - class InvalidScopedContext < JsonLdError; @code = "invalid scoped context"; end - class InvalidScriptElement < JsonLdError; @code = "invalid script element"; end - class InvalidSetOrListObject < JsonLdError; @code = "invalid set or list object"; end + class CollidingKeywords < JsonLdError; @code = 'colliding keywords'; end + class ConflictingIndexes < JsonLdError; @code = 'conflicting indexes'; end + class CyclicIRIMapping < JsonLdError; @code = 'cyclic IRI mapping'; end + class InvalidAnnotation < JsonLdError; @code = 'invalid annotation'; end + class 
InvalidBaseIRI < JsonLdError; @code = 'invalid base IRI'; end + class InvalidContainerMapping < JsonLdError; @code = 'invalid container mapping'; end + class InvalidContextEntry < JsonLdError; @code = 'invalid context entry'; end + class InvalidContextNullification < JsonLdError; @code = 'invalid context nullification'; end + class InvalidDefaultLanguage < JsonLdError; @code = 'invalid default language'; end + class InvalidIdValue < JsonLdError; @code = 'invalid @id value'; end + class InvalidIndexValue < JsonLdError; @code = 'invalid @index value'; end + class InvalidVersionValue < JsonLdError; @code = 'invalid @version value'; end + class InvalidImportValue < JsonLdError; @code = 'invalid @import value'; end + class InvalidIncludedValue < JsonLdError; @code = 'invalid @included value'; end + class InvalidIRIMapping < JsonLdError; @code = 'invalid IRI mapping'; end + class InvalidJsonLiteral < JsonLdError; @code = 'invalid JSON literal'; end + class InvalidKeywordAlias < JsonLdError; @code = 'invalid keyword alias'; end + class InvalidLanguageMapping < JsonLdError; @code = 'invalid language mapping'; end + class InvalidLanguageMapValue < JsonLdError; @code = 'invalid language map value'; end + class InvalidLanguageTaggedString < JsonLdError; @code = 'invalid language-tagged string'; end + class InvalidLanguageTaggedValue < JsonLdError; @code = 'invalid language-tagged value'; end + class InvalidLocalContext < JsonLdError; @code = 'invalid local context'; end + class InvalidNestValue < JsonLdError; @code = 'invalid @nest value'; end + class InvalidPrefixValue < JsonLdError; @code = 'invalid @prefix value'; end + class InvalidPropagateValue < JsonLdError; @code = 'invalid @propagate value'; end + class InvalidEmbeddedNode < JsonLdError; @code = 'invalid embedded node'; end + class InvalidRemoteContext < JsonLdError; @code = 'invalid remote context'; end + class InvalidReverseProperty < JsonLdError; @code = 'invalid reverse property'; end + class 
InvalidReversePropertyMap < JsonLdError; @code = 'invalid reverse property map'; end + class InvalidReversePropertyValue < JsonLdError; @code = 'invalid reverse property value'; end + class InvalidReverseValue < JsonLdError; @code = 'invalid @reverse value'; end + class InvalidScopedContext < JsonLdError; @code = 'invalid scoped context'; end + class InvalidScriptElement < JsonLdError; @code = 'invalid script element'; end + class InvalidSetOrListObject < JsonLdError; @code = 'invalid set or list object'; end class InvalidStreamingKeyOrder < JsonLdError; @code = 'invalid streaming key order' end - class InvalidTermDefinition < JsonLdError; @code = "invalid term definition"; end - class InvalidBaseDirection < JsonLdError; @code = "invalid base direction"; end - class InvalidTypedValue < JsonLdError; @code = "invalid typed value"; end - class InvalidTypeMapping < JsonLdError; @code = "invalid type mapping"; end - class InvalidTypeValue < JsonLdError; @code = "invalid type value"; end - class InvalidValueObject < JsonLdError; @code = "invalid value object"; end - class InvalidValueObjectValue < JsonLdError; @code = "invalid value object value"; end - class InvalidVocabMapping < JsonLdError; @code = "invalid vocab mapping"; end - class IRIConfusedWithPrefix < JsonLdError; @code = "IRI confused with prefix"; end - class KeywordRedefinition < JsonLdError; @code = "keyword redefinition"; end - class LoadingDocumentFailed < JsonLdError; @code = "loading document failed"; end - class LoadingRemoteContextFailed < JsonLdError; @code = "loading remote context failed"; end - class ContextOverflow < JsonLdError; @code = "context overflow"; end - class MissingIncludedReferent < JsonLdError; @code = "missing @included referent"; end - class MultipleContextLinkHeaders < JsonLdError; @code = "multiple context link headers"; end - class ProtectedTermRedefinition < JsonLdError; @code = "protected term redefinition"; end - class ProcessingModeConflict < JsonLdError; @code = "processing 
mode conflict"; end - class InvalidFrame < JsonLdError; @code = "invalid frame"; end - class InvalidEmbedValue < InvalidFrame; @code = "invalid @embed value"; end + class InvalidTermDefinition < JsonLdError; @code = 'invalid term definition'; end + class InvalidBaseDirection < JsonLdError; @code = 'invalid base direction'; end + class InvalidTypedValue < JsonLdError; @code = 'invalid typed value'; end + class InvalidTypeMapping < JsonLdError; @code = 'invalid type mapping'; end + class InvalidTypeValue < JsonLdError; @code = 'invalid type value'; end + class InvalidValueObject < JsonLdError; @code = 'invalid value object'; end + class InvalidValueObjectValue < JsonLdError; @code = 'invalid value object value'; end + class InvalidVocabMapping < JsonLdError; @code = 'invalid vocab mapping'; end + class IRIConfusedWithPrefix < JsonLdError; @code = 'IRI confused with prefix'; end + class KeywordRedefinition < JsonLdError; @code = 'keyword redefinition'; end + class LoadingDocumentFailed < JsonLdError; @code = 'loading document failed'; end + class LoadingRemoteContextFailed < JsonLdError; @code = 'loading remote context failed'; end + class ContextOverflow < JsonLdError; @code = 'context overflow'; end + class MissingIncludedReferent < JsonLdError; @code = 'missing @included referent'; end + class MultipleContextLinkHeaders < JsonLdError; @code = 'multiple context link headers'; end + class ProtectedTermRedefinition < JsonLdError; @code = 'protected term redefinition'; end + class ProcessingModeConflict < JsonLdError; @code = 'processing mode conflict'; end + class InvalidFrame < JsonLdError; @code = 'invalid frame'; end + class InvalidEmbedValue < InvalidFrame; @code = 'invalid @embed value'; end end end end diff --git a/lib/json/ld/api.rb b/lib/json/ld/api.rb index 9a8e8690..965d3dc7 100644 --- a/lib/json/ld/api.rb +++ b/lib/json/ld/api.rb @@ -1,5 +1,7 @@ -# -*- encoding: utf-8 -*- # frozen_string_literal: true + +require 'English' + require 'openssl' require 'cgi' 
require 'json/ld/expand' @@ -14,886 +16,920 @@ rescue LoadError end -module JSON::LD - ## - # A JSON-LD processor based on the JsonLdProcessor interface. - # - # This API provides a clean mechanism that enables developers to convert JSON-LD data into a a variety of output formats that are easier to work with in various programming languages. If a JSON-LD API is provided in a programming environment, the entirety of the following API must be implemented. - # - # Note that the API method signatures are somewhat different than what is specified, as the use of Futures and explicit callback parameters is not as relevant for Ruby-based interfaces. - # - # @see https://www.w3.org/TR/json-ld11-api/#the-application-programming-interface - # @author [Gregg Kellogg](http://greggkellogg.net/) - class API - include Expand - include Compact - include ToRDF - include Flatten - include FromRDF - include Frame - include RDF::Util::Logger - - # Options used for open_file - OPEN_OPTS = { - headers: {"Accept" => "application/ld+json, text/html;q=0.8, application/xhtml+xml;q=0.8, application/json;q=0.5"} - } - - # The following constants are used to reduce object allocations - LINK_REL_CONTEXT = %w(rel http://www.w3.org/ns/json-ld#context).freeze - LINK_REL_ALTERNATE = %w(rel alternate).freeze - LINK_TYPE_JSONLD = %w(type application/ld+json).freeze - JSON_LD_PROCESSING_MODES = %w(json-ld-1.0 json-ld-1.1).freeze - - # Current input - # @!attribute [rw] input - # @return [String, #read, Hash, Array] - attr_accessor :value - - # Input evaluation context - # @!attribute [rw] context - # @return [JSON::LD::Context] - attr_accessor :context - - # Current Blank Node Namer - # @!attribute [r] namer - # @return [JSON::LD::BlankNodeNamer] - attr_reader :namer - +module JSON + module LD ## - # Initialize the API, reading in any document and setting global options + # A JSON-LD processor based on the JsonLdProcessor interface. 
# - # @param [String, #read, Hash, Array] input - # @param [String, #read, Hash, Array, JSON::LD::Context] context - # An external context to use additionally to the context embedded in input when expanding the input. - # @param [Hash{Symbol => Object}] options - # @option options [Symbol] :adapter used with MultiJson - # @option options [RDF::URI, String, #to_s] :base - # The Base IRI to use when expanding the document. This overrides the value of `input` if it is a _IRI_. If not specified and `input` is not an _IRI_, the base IRI defaults to the current document IRI if in a browser context, or the empty string if there is no document context. If not specified, and a base IRI is found from `input`, options[:base] will be modified with this value. - # @option options [Boolean] :compactArrays (true) - # If set to `true`, the JSON-LD processor replaces arrays with just one element with that element during compaction. If set to `false`, all arrays will remain arrays even if they have just one element. - # @option options [Boolean] :compactToRelative (true) - # Creates document relative IRIs when compacting, if `true`, otherwise leaves expanded. - # @option options [Proc] :documentLoader - # The callback of the loader to be used to retrieve remote documents and contexts. If specified, it must be used to retrieve remote documents and contexts; otherwise, if not specified, the processor's built-in loader must be used. See {documentLoader} for the method signature. - # @option options [String, #read, Hash, Array, JSON::LD::Context] :expandContext - # A context that is used to initialize the active context when expanding a document. - # @option options [Boolean] :extendedRepresentation (false) - # Use the extended internal representation. - # @option options [Boolean] :extractAllScripts - # If set, when given an HTML input without a fragment identifier, extracts all `script` elements with type `application/ld+json` into an array during expansion. 
- # @option options [Boolean, String, RDF::URI] :flatten - # If set to a value that is not `false`, the JSON-LD processor must modify the output of the Compaction Algorithm or the Expansion Algorithm by coalescing all properties associated with each subject via the Flattening Algorithm. The value of `flatten must` be either an _IRI_ value representing the name of the graph to flatten, or `true`. If the value is `true`, then the first graph encountered in the input document is selected and flattened. - # @option options [String] :language - # When set, this has the effect of inserting a context definition with `@language` set to the associated value, creating a default language for interpreting string values. - # @option options [Symbol] :library - # One of :nokogiri or :rexml. If nil/unspecified uses :nokogiri if available, :rexml otherwise. - # @option options [Boolean] :lowercaseLanguage - # By default, language tags are left as is. To normalize to lowercase, set this option to `true`. - # @option options [Boolean] :ordered (true) - # Order traversal of dictionary members by key when performing algorithms. - # @option options [String] :processingMode - # Processing mode, json-ld-1.0 or json-ld-1.1. - # @option options [Boolean] :rdfstar (false) - # support parsing JSON-LD-star statement resources. - # @option options [Boolean] :rename_bnodes (true) - # Rename bnodes as part of expansion, or keep them the same. - # @option options [Boolean] :unique_bnodes (false) - # Use unique bnode identifiers, defaults to using the identifier which the node was originally initialized with (if any). - # @option options [Boolean] :validate Validate input, if a string or readable object. 
- # @yield [api] - # @yieldparam [API] - # @raise [JsonLdError] - def initialize(input, context, **options, &block) - @options = { - compactArrays: true, - ordered: false, - extractAllScripts: false, - rename_bnodes: true, - unique_bnodes: false, - }.merge(options) - @namer = @options[:unique_bnodes] ? BlankNodeUniqer.new : (@options[:rename_bnodes] ? BlankNodeNamer.new("b") : BlankNodeMapper.new) - - @options[:base] = RDF::URI(@options[:base]) if @options[:base] && !@options[:base].is_a?(RDF::URI) - # For context via Link header - _, context_ref = nil, nil - - @value = case input - when Array, Hash then input.dup - when IO, StringIO, String - remote_doc = self.class.loadRemoteDocument(input, **@options) - - context_ref = remote_doc.contextUrl - @options[:base] = RDF::URI(remote_doc.documentUrl) if remote_doc.documentUrl && !@options[:no_default_base] - - case remote_doc.document - when String - mj_opts = options.keep_if {|k,v| k != :adapter || MUTLI_JSON_ADAPTERS.include?(v)} - MultiJson.load(remote_doc.document, **mj_opts) + # This API provides a clean mechanism that enables developers to convert JSON-LD data into a a variety of output formats that are easier to work with in various programming languages. If a JSON-LD API is provided in a programming environment, the entirety of the following API must be implemented. + # + # Note that the API method signatures are somewhat different than what is specified, as the use of Futures and explicit callback parameters is not as relevant for Ruby-based interfaces. 
+ # + # @see https://www.w3.org/TR/json-ld11-api/#the-application-programming-interface + # @author [Gregg Kellogg](http://greggkellogg.net/) + class API + include Expand + include Compact + include ToRDF + include Flatten + include FromRDF + include Frame + include RDF::Util::Logger + + # Options used for open_file + OPEN_OPTS = { + headers: { "Accept" => "application/ld+json, text/html;q=0.8, application/xhtml+xml;q=0.8, application/json;q=0.5" } + } + + # The following constants are used to reduce object allocations + LINK_REL_CONTEXT = %w[rel http://www.w3.org/ns/json-ld#context].freeze + LINK_REL_ALTERNATE = %w[rel alternate].freeze + LINK_TYPE_JSONLD = %w[type application/ld+json].freeze + JSON_LD_PROCESSING_MODES = %w[json-ld-1.0 json-ld-1.1].freeze + + # Current input + # @!attribute [rw] input + # @return [String, #read, Hash, Array] + attr_accessor :value + + # Input evaluation context + # @!attribute [rw] context + # @return [JSON::LD::Context] + attr_accessor :context + + # Current Blank Node Namer + # @!attribute [r] namer + # @return [JSON::LD::BlankNodeNamer] + attr_reader :namer + + ## + # Initialize the API, reading in any document and setting global options + # + # @param [String, #read, Hash, Array] input + # @param [String, #read, Hash, Array, JSON::LD::Context] context + # An external context to use additionally to the context embedded in input when expanding the input. + # @param [Hash{Symbol => Object}] options + # @option options [Symbol] :adapter used with MultiJson + # @option options [RDF::URI, String, #to_s] :base + # The Base IRI to use when expanding the document. This overrides the value of `input` if it is a _IRI_. If not specified and `input` is not an _IRI_, the base IRI defaults to the current document IRI if in a browser context, or the empty string if there is no document context. If not specified, and a base IRI is found from `input`, options[:base] will be modified with this value. 
+ # @option options [Boolean] :compactArrays (true) + # If set to `true`, the JSON-LD processor replaces arrays with just one element with that element during compaction. If set to `false`, all arrays will remain arrays even if they have just one element. + # @option options [Boolean] :compactToRelative (true) + # Creates document relative IRIs when compacting, if `true`, otherwise leaves expanded. + # @option options [Proc] :documentLoader + # The callback of the loader to be used to retrieve remote documents and contexts. If specified, it must be used to retrieve remote documents and contexts; otherwise, if not specified, the processor's built-in loader must be used. See {documentLoader} for the method signature. + # @option options [String, #read, Hash, Array, JSON::LD::Context] :expandContext + # A context that is used to initialize the active context when expanding a document. + # @option options [Boolean] :extendedRepresentation (false) + # Use the extended internal representation. + # @option options [Boolean] :extractAllScripts + # If set, when given an HTML input without a fragment identifier, extracts all `script` elements with type `application/ld+json` into an array during expansion. + # @option options [Boolean, String, RDF::URI] :flatten + # If set to a value that is not `false`, the JSON-LD processor must modify the output of the Compaction Algorithm or the Expansion Algorithm by coalescing all properties associated with each subject via the Flattening Algorithm. The value of `flatten must` be either an _IRI_ value representing the name of the graph to flatten, or `true`. If the value is `true`, then the first graph encountered in the input document is selected and flattened. + # @option options [String] :language + # When set, this has the effect of inserting a context definition with `@language` set to the associated value, creating a default language for interpreting string values. 
+ # @option options [Symbol] :library + # One of :nokogiri or :rexml. If nil/unspecified uses :nokogiri if available, :rexml otherwise. + # @option options [Boolean] :lowercaseLanguage + # By default, language tags are left as is. To normalize to lowercase, set this option to `true`. + # @option options [Boolean] :ordered (true) + # Order traversal of dictionary members by key when performing algorithms. + # @option options [String] :processingMode + # Processing mode, json-ld-1.0 or json-ld-1.1. + # @option options [Boolean] :rdfstar (false) + # support parsing JSON-LD-star statement resources. + # @option options [Boolean] :rename_bnodes (true) + # Rename bnodes as part of expansion, or keep them the same. + # @option options [Boolean] :unique_bnodes (false) + # Use unique bnode identifiers, defaults to using the identifier which the node was originally initialized with (if any). + # @option options [Boolean] :validate Validate input, if a string or readable object. + # @yield [api] + # @yieldparam [API] + # @raise [JsonLdError] + def initialize(input, context, **options, &block) + @options = { + compactArrays: true, + ordered: false, + extractAllScripts: false, + rename_bnodes: true, + unique_bnodes: false + }.merge(options) + @namer = if @options[:unique_bnodes] + BlankNodeUniqer.new else - # Already parsed - remote_doc.document + (@options[:rename_bnodes] ? 
BlankNodeNamer.new("b") : BlankNodeMapper.new) end - end - # If not provided, first use context from document, or from a Link header - context ||= context_ref || {} - @context = Context.parse(context, **@options) + @options[:base] = RDF::URI(@options[:base]) if @options[:base] && !@options[:base].is_a?(RDF::URI) + # For context via Link header + _ = nil + context_ref = nil + + @value = case input + when Array, Hash then input.dup + when IO, StringIO, String + remote_doc = self.class.loadRemoteDocument(input, **@options) + + context_ref = remote_doc.contextUrl + @options[:base] = RDF::URI(remote_doc.documentUrl) if remote_doc.documentUrl && !@options[:no_default_base] + + case remote_doc.document + when String + mj_opts = options.keep_if { |k, v| k != :adapter || MUTLI_JSON_ADAPTERS.include?(v) } + MultiJson.load(remote_doc.document, **mj_opts) + else + # Already parsed + remote_doc.document + end + end + + # If not provided, first use context from document, or from a Link header + context ||= context_ref || {} + @context = Context.parse(context, **@options) + + return unless block - if block_given? case block.arity - when 0, -1 then instance_eval(&block) - else block.call(self) + when 0, -1 then instance_eval(&block) + else yield(self) end end - end - # This is used internally only - private :initialize - - ## - # Expands the given input according to the steps in the Expansion Algorithm. The input must be copied, expanded and returned if there are no errors. If the expansion fails, an appropriate exception must be thrown. - # - # The resulting `Array` either returned or yielded - # - # @param [String, #read, Hash, Array] input - # The JSON-LD object to copy and perform the expansion upon. - # @param [Proc] serializer (nil) - # A Serializer method used for generating the JSON serialization of the result. If absent, the internal Ruby objects are returned, which can be transformed to JSON externally via `#to_json`. - # See {JSON::LD::API.serializer}. 
- # @param [Hash{Symbol => Object}] options - # @option options (see #initialize) - # @raise [JsonLdError] - # @yield jsonld, base_iri - # @yieldparam [Array] jsonld - # The expanded JSON-LD document - # @yieldparam [RDF::URI] base_iri - # The document base as determined during expansion - # @yieldreturn [Object] returned object - # @return [Object, Array] - # If a block is given, the result of evaluating the block is returned, otherwise, the expanded JSON-LD document - # @see https://www.w3.org/TR/json-ld11-api/#expansion-algorithm - def self.expand(input, framing: false, serializer: nil, **options, &block) - result = doc_base = nil - API.new(input, options[:expandContext], **options) do - result = self.expand(self.value, nil, self.context, - framing: framing) - doc_base = @options[:base] - end + # This is used internally only + private :initialize + + ## + # Expands the given input according to the steps in the Expansion Algorithm. The input must be copied, expanded and returned if there are no errors. If the expansion fails, an appropriate exception must be thrown. + # + # The resulting `Array` either returned or yielded + # + # @param [String, #read, Hash, Array] input + # The JSON-LD object to copy and perform the expansion upon. + # @param [Proc] serializer (nil) + # A Serializer method used for generating the JSON serialization of the result. If absent, the internal Ruby objects are returned, which can be transformed to JSON externally via `#to_json`. + # See {JSON::LD::API.serializer}. 
+ # @param [Hash{Symbol => Object}] options + # @option options (see #initialize) + # @raise [JsonLdError] + # @yield jsonld, base_iri + # @yieldparam [Array] jsonld + # The expanded JSON-LD document + # @yieldparam [RDF::URI] base_iri + # The document base as determined during expansion + # @yieldreturn [Object] returned object + # @return [Object, Array] + # If a block is given, the result of evaluating the block is returned, otherwise, the expanded JSON-LD document + # @see https://www.w3.org/TR/json-ld11-api/#expansion-algorithm + def self.expand(input, framing: false, serializer: nil, **options, &block) + result = doc_base = nil + API.new(input, options[:expandContext], **options) do + result = expand(value, nil, context, + framing: framing) + doc_base = @options[:base] + end - # If, after the algorithm outlined above is run, the resulting element is an JSON object with just a @graph property, element is set to the value of @graph's value. - result = result['@graph'] if result.is_a?(Hash) && result.length == 1 && result.key?('@graph') + # If, after the algorithm outlined above is run, the resulting element is a JSON object with just a @graph property, element is set to the value of @graph's value. + result = result['@graph'] if result.is_a?(Hash) && result.length == 1 && result.key?('@graph') - # Finally, if element is a JSON object, it is wrapped into an array. - result = [result].compact unless result.is_a?(Array) - result = serializer.call(result, **options) if serializer + # Finally, if element is a JSON object, it is wrapped into an array. + result = [result].compact unless result.is_a?(Array) + result = serializer.call(result, **options) if serializer - if block_given? 
- case block.arity - when 1 then yield(result) - when 2 then yield(result, doc_base) + if block + case block.arity + when 1 then yield(result) + when 2 then yield(result, doc_base) + else + raise "Unexpected number of yield parameters to expand" + end else - raise "Unexpected number of yield parameters to expand" + result end - else - result end - end - ## - # Compacts the given input according to the steps in the Compaction Algorithm. The input must be copied, compacted and returned if there are no errors. If the compaction fails, an appropirate exception must be thrown. - # - # If no context is provided, the input document is compacted using the top-level context of the document - # - # The resulting `Hash` is either returned or yielded, if a block is given. - # - # @param [String, #read, Hash, Array] input - # The JSON-LD object to copy and perform the compaction upon. - # @param [String, #read, Hash, Array, JSON::LD::Context] context - # The base context to use when compacting the input. - # @param [Proc] serializer (nil) - # A Serializer instance used for generating the JSON serialization of the result. If absent, the internal Ruby objects are returned, which can be transformed to JSON externally via `#to_json`. - # See {JSON::LD::API.serializer}. - # @param [Boolean] expanded (false) Input is already expanded - # @param [Hash{Symbol => Object}] options - # @option options (see #initialize) - # @yield jsonld - # @yieldparam [Hash] jsonld - # The compacted JSON-LD document - # @yieldreturn [Object] returned object - # @return [Object, Hash] - # If a block is given, the result of evaluating the block is returned, otherwise, the compacted JSON-LD document - # @raise [JsonLdError] - # @see https://www.w3.org/TR/json-ld11-api/#compaction-algorithm - def self.compact(input, context, expanded: false, serializer: nil, **options) - result = nil - options = {compactToRelative: true}.merge(options) - - # 1) Perform the Expansion Algorithm on the JSON-LD input. 
- # This removes any existing context to allow the given context to be cleanly applied. - expanded_input = expanded ? input : API.expand(input, ordered: false, **options) do |res, base_iri| - options[:base] ||= RDF::URI(base_iri) if base_iri && options[:compactToRelative] - res - end + ## + # Compacts the given input according to the steps in the Compaction Algorithm. The input must be copied, compacted and returned if there are no errors. If the compaction fails, an appropriate exception must be thrown. + # + # If no context is provided, the input document is compacted using the top-level context of the document + # + # The resulting `Hash` is either returned or yielded, if a block is given. + # + # @param [String, #read, Hash, Array] input + # The JSON-LD object to copy and perform the compaction upon. + # @param [String, #read, Hash, Array, JSON::LD::Context] context + # The base context to use when compacting the input. + # @param [Proc] serializer (nil) + # A Serializer instance used for generating the JSON serialization of the result. If absent, the internal Ruby objects are returned, which can be transformed to JSON externally via `#to_json`. + # See {JSON::LD::API.serializer}. 
+ # @param [Boolean] expanded (false) Input is already expanded + # @param [Hash{Symbol => Object}] options + # @option options (see #initialize) + # @yield jsonld + # @yieldparam [Hash] jsonld + # The compacted JSON-LD document + # @yieldreturn [Object] returned object + # @return [Object, Hash] + # If a block is given, the result of evaluating the block is returned, otherwise, the compacted JSON-LD document + # @raise [JsonLdError] + # @see https://www.w3.org/TR/json-ld11-api/#compaction-algorithm + def self.compact(input, context, expanded: false, serializer: nil, **options) + result = nil + options = { compactToRelative: true }.merge(options) - API.new(expanded_input, context, no_default_base: true, **options) do - # log_debug(".compact") {"expanded input: #{expanded_input.to_json(JSON_STATE) rescue 'malformed json'}"} - result = compact(value) + # 1) Perform the Expansion Algorithm on the JSON-LD input. + # This removes any existing context to allow the given context to be cleanly applied. + expanded_input = if expanded + input + else + API.expand(input, ordered: false, **options) do |res, base_iri| + options[:base] ||= RDF::URI(base_iri) if base_iri && options[:compactToRelative] + res + end + end - # xxx) Add the given context to the output - ctx = self.context.serialize(provided_context: context) - if result.is_a?(Array) - kwgraph = self.context.compact_iri('@graph', vocab: true) - result = result.empty? ? {} : {kwgraph => result} + API.new(expanded_input, context, no_default_base: true, **options) do + # log_debug(".compact") {"expanded input: #{expanded_input.to_json(JSON_STATE) rescue 'malformed json'}"} + result = compact(value) + + # xxx) Add the given context to the output + ctx = self.context.serialize(provided_context: context) + if result.is_a?(Array) + kwgraph = self.context.compact_iri('@graph', vocab: true) + result = result.empty? ? {} : { kwgraph => result } + end + result = ctx.merge(result) unless ctx.fetch('@context', {}).empty? 
end - result = ctx.merge(result) unless ctx.fetch('@context', {}).empty? + result = serializer.call(result, **options) if serializer + block_given? ? yield(result) : result end - result = serializer.call(result, **options) if serializer - block_given? ? yield(result) : result - end - ## - # This algorithm flattens an expanded JSON-LD document by collecting all properties of a node in a single JSON object and labeling all blank nodes with blank node identifiers. This resulting uniform shape of the document, may drastically simplify the code required to process JSON-LD data in certain applications. - # - # The resulting `Array` is either returned, or yielded if a block is given. - # - # @param [String, #read, Hash, Array] input - # The JSON-LD object or array of JSON-LD objects to flatten or an IRI referencing the JSON-LD document to flatten. - # @param [String, #read, Hash, Array, JSON::LD::EvaluationContext] context - # An optional external context to use additionally to the context embedded in input when expanding the input. - # @param [Boolean] expanded (false) Input is already expanded - # @param [Proc] serializer (nil) - # A Serializer instance used for generating the JSON serialization of the result. If absent, the internal Ruby objects are returned, which can be transformed to JSON externally via `#to_json`. - # See {JSON::LD::API.serializer}. - # @param [Hash{Symbol => Object}] options - # @option options (see #initialize) - # @option options [Boolean] :createAnnotations - # Unfold embedded nodes which can be represented using `@annotation`. 
- # @yield jsonld - # @yieldparam [Hash] jsonld - # The flattened JSON-LD document - # @yieldreturn [Object] returned object - # @return [Object, Hash] - # If a block is given, the result of evaluating the block is returned, otherwise, the flattened JSON-LD document - # @see https://www.w3.org/TR/json-ld11-api/#framing-algorithm - def self.flatten(input, context, expanded: false, serializer: nil, **options) - flattened = [] - options = { - compactToRelative: true, - extractAllScripts: true, - }.merge(options) - - # Expand input to simplify processing - expanded_input = expanded ? input : API.expand(input, **options) do |result, base_iri| - options[:base] ||= RDF::URI(base_iri) if base_iri && options[:compactToRelative] - result - end + ## + # This algorithm flattens an expanded JSON-LD document by collecting all properties of a node in a single JSON object and labeling all blank nodes with blank node identifiers. This resulting uniform shape of the document, may drastically simplify the code required to process JSON-LD data in certain applications. + # + # The resulting `Array` is either returned, or yielded if a block is given. + # + # @param [String, #read, Hash, Array] input + # The JSON-LD object or array of JSON-LD objects to flatten or an IRI referencing the JSON-LD document to flatten. + # @param [String, #read, Hash, Array, JSON::LD::EvaluationContext] context + # An optional external context to use additionally to the context embedded in input when expanding the input. + # @param [Boolean] expanded (false) Input is already expanded + # @param [Proc] serializer (nil) + # A Serializer instance used for generating the JSON serialization of the result. If absent, the internal Ruby objects are returned, which can be transformed to JSON externally via `#to_json`. + # See {JSON::LD::API.serializer}. 
+ # @param [Hash{Symbol => Object}] options + # @option options (see #initialize) + # @option options [Boolean] :createAnnotations + # Unfold embedded nodes which can be represented using `@annotation`. + # @yield jsonld + # @yieldparam [Hash] jsonld + # The flattened JSON-LD document + # @yieldreturn [Object] returned object + # @return [Object, Hash] + # If a block is given, the result of evaluating the block is returned, otherwise, the flattened JSON-LD document + # @see https://www.w3.org/TR/json-ld11-api/#framing-algorithm + def self.flatten(input, context, expanded: false, serializer: nil, **options) + flattened = [] + options = { + compactToRelative: true, + extractAllScripts: true + }.merge(options) + + # Expand input to simplify processing + expanded_input = if expanded + input + else + API.expand(input, **options) do |result, base_iri| + options[:base] ||= RDF::URI(base_iri) if base_iri && options[:compactToRelative] + result + end + end - # Initialize input using - API.new(expanded_input, context, no_default_base: true, **options) do - # log_debug(".flatten") {"expanded input: #{value.to_json(JSON_STATE) rescue 'malformed json'}"} + # Initialize input using + API.new(expanded_input, context, no_default_base: true, **options) do + # log_debug(".flatten") {"expanded input: #{value.to_json(JSON_STATE) rescue 'malformed json'}"} - # Rename blank nodes recusively. Note that this does not create new blank node identifiers where none exist, which is performed in the node map generation algorithm. - @value = rename_bnodes(@value) if @options[:rename_bnodes] + # Rename blank nodes recursively. Note that this does not create new blank node identifiers where none exist, which is performed in the node map generation algorithm. + @value = rename_bnodes(@value) if @options[:rename_bnodes] - # Initialize node map to a JSON object consisting of a single member whose key is @default and whose value is an empty JSON object. 
- graph_maps = {'@default' => {}} - create_node_map(value, graph_maps) + # Initialize node map to a JSON object consisting of a single member whose key is @default and whose value is an empty JSON object. + graph_maps = { '@default' => {} } + create_node_map(value, graph_maps) - # If create annotations flag is set, then update each node map in graph maps with the result of calling the create annotations algorithm. - if options[:createAnnotations] - graph_maps.values.each do |node_map| - create_annotations(node_map) + # If create annotations flag is set, then update each node map in graph maps with the result of calling the create annotations algorithm. + if options[:createAnnotations] + graph_maps.each_value do |node_map| + create_annotations(node_map) + end end - end - default_graph = graph_maps['@default'] - graph_maps.keys.opt_sort(ordered: @options[:ordered]).each do |graph_name| - next if graph_name == '@default' + default_graph = graph_maps['@default'] + graph_maps.keys.opt_sort(ordered: @options[:ordered]).each do |graph_name| + next if graph_name == '@default' - graph = graph_maps[graph_name] - entry = default_graph[graph_name] ||= {'@id' => graph_name} - nodes = entry['@graph'] ||= [] - graph.keys.opt_sort(ordered: @options[:ordered]).each do |id| - nodes << graph[id] unless node_reference?(graph[id]) + graph = graph_maps[graph_name] + entry = default_graph[graph_name] ||= { '@id' => graph_name } + nodes = entry['@graph'] ||= [] + graph.keys.opt_sort(ordered: @options[:ordered]).each do |id| + nodes << graph[id] unless node_reference?(graph[id]) + end + end + default_graph.keys.opt_sort(ordered: @options[:ordered]).each do |id| + flattened << default_graph[id] unless node_reference?(default_graph[id]) end - end - default_graph.keys.opt_sort(ordered: @options[:ordered]).each do |id| - flattened << default_graph[id] unless node_reference?(default_graph[id]) - end - if context && !flattened.empty? 
- # Otherwise, return the result of compacting flattened according the Compaction algorithm passing context ensuring that the compaction result uses the @graph keyword (or its alias) at the top-level, even if the context is empty or if there is only one element to put in the @graph array. This ensures that the returned document has a deterministic structure. - compacted = as_array(compact(flattened)) - kwgraph = self.context.compact_iri('@graph', vocab: true) - flattened = self.context. - serialize(provided_context: context). - merge(kwgraph => compacted) + if context && !flattened.empty? + # Otherwise, return the result of compacting flattened according to the Compaction algorithm passing context ensuring that the compaction result uses the @graph keyword (or its alias) at the top-level, even if the context is empty or if there is only one element to put in the @graph array. This ensures that the returned document has a deterministic structure. + compacted = as_array(compact(flattened)) + kwgraph = self.context.compact_iri('@graph', vocab: true) + flattened = self.context + .serialize(provided_context: context) + .merge(kwgraph => compacted) + end end - end - flattened = serializer.call(flattened, **options) if serializer - block_given? ? yield(flattened) : flattened - end + flattened = serializer.call(flattened, **options) if serializer + block_given? ? yield(flattened) : flattened + end - ## - # Frames the given input using the frame according to the steps in the Framing Algorithm. The input is used to build the framed output and is returned if there are no errors. If there are no matches for the frame, null must be returned. Exceptions must be thrown if there are errors. - # - # The resulting `Array` is either returned, or yielded if a block is given. - # - # @param [String, #read, Hash, Array] input - # The JSON-LD object to copy and perform the framing on. - # @param [String, #read, Hash, Array] frame - # The frame to use when re-arranging the data. 
- # @param [Boolean] expanded (false) Input is already expanded - # @option options (see #initialize) - # @option options ['@always', '@link', '@once', '@never'] :embed ('@once') - # a flag specifying that objects should be directly embedded in the output, instead of being referred to by their IRI. - # @option options [Boolean] :explicit (false) - # a flag specifying that for properties to be included in the output, they must be explicitly declared in the framing context. - # @option options [Boolean] :requireAll (false) - # A flag specifying that all properties present in the input frame must either have a default value or be present in the JSON-LD input for the frame to match. - # @option options [Boolean] :omitDefault (false) - # a flag specifying that properties that are missing from the JSON-LD input should be omitted from the output. - # @option options [Boolean] :pruneBlankNodeIdentifiers (true) removes blank node identifiers that are only used once. - # @option options [Boolean] :omitGraph does not use `@graph` at top level unless necessary to describe multiple objects, defaults to `true` if processingMode is 1.1, otherwise `false`. 
- # @yield jsonld - # @yieldparam [Hash] jsonld - # The framed JSON-LD document - # @yieldreturn [Object] returned object - # @return [Object, Hash] - # If a block is given, the result of evaluating the block is returned, otherwise, the framed JSON-LD document - # @raise [InvalidFrame] - # @see https://www.w3.org/TR/json-ld11-api/#framing-algorithm - def self.frame(input, frame, expanded: false, serializer: nil, **options) - result = nil - options = { - base: (RDF::URI(input) if input.is_a?(String)), - compactArrays: true, - compactToRelative: true, - embed: '@once', - explicit: false, - requireAll: false, - omitDefault: false, - }.merge(options) - - framing_state = { - graphMap: {}, - graphStack: [], - subjectStack: [], - link: {}, - embedded: false # False at the top-level - } + ## + # Frames the given input using the frame according to the steps in the Framing Algorithm. The input is used to build the framed output and is returned if there are no errors. If there are no matches for the frame, null must be returned. Exceptions must be thrown if there are errors. + # + # The resulting `Array` is either returned, or yielded if a block is given. + # + # @param [String, #read, Hash, Array] input + # The JSON-LD object to copy and perform the framing on. + # @param [String, #read, Hash, Array] frame + # The frame to use when re-arranging the data. + # @param [Boolean] expanded (false) Input is already expanded + # @option options (see #initialize) + # @option options ['@always', '@link', '@once', '@never'] :embed ('@once') + # a flag specifying that objects should be directly embedded in the output, instead of being referred to by their IRI. + # @option options [Boolean] :explicit (false) + # a flag specifying that for properties to be included in the output, they must be explicitly declared in the framing context. 
+ # @option options [Boolean] :requireAll (false) + # A flag specifying that all properties present in the input frame must either have a default value or be present in the JSON-LD input for the frame to match. + # @option options [Boolean] :omitDefault (false) + # a flag specifying that properties that are missing from the JSON-LD input should be omitted from the output. + # @option options [Boolean] :pruneBlankNodeIdentifiers (true) removes blank node identifiers that are only used once. + # @option options [Boolean] :omitGraph does not use `@graph` at top level unless necessary to describe multiple objects, defaults to `true` if processingMode is 1.1, otherwise `false`. + # @yield jsonld + # @yieldparam [Hash] jsonld + # The framed JSON-LD document + # @yieldreturn [Object] returned object + # @return [Object, Hash] + # If a block is given, the result of evaluating the block is returned, otherwise, the framed JSON-LD document + # @raise [InvalidFrame] + # @see https://www.w3.org/TR/json-ld11-api/#framing-algorithm + def self.frame(input, frame, expanded: false, serializer: nil, **options) + result = nil + options = { + base: (RDF::URI(input) if input.is_a?(String)), + compactArrays: true, + compactToRelative: true, + embed: '@once', + explicit: false, + requireAll: false, + omitDefault: false + }.merge(options) + + framing_state = { + graphMap: {}, + graphStack: [], + subjectStack: [], + link: {}, + embedded: false # False at the top-level + } + + # de-reference frame to create the framing object + frame = case frame + when Hash then frame.dup + when IO, StringIO, String + remote_doc = loadRemoteDocument(frame, + profile: 'http://www.w3.org/ns/json-ld#frame', + requestProfile: 'http://www.w3.org/ns/json-ld#frame', + **options) + if remote_doc.document.is_a?(String) + mj_opts = options.keep_if { |k, v| k != :adapter || MUTLI_JSON_ADAPTERS.include?(v) } + MultiJson.load(remote_doc.document, **mj_opts) + else + remote_doc.document + end + end - # de-reference frame 
to create the framing object - frame = case frame - when Hash then frame.dup - when IO, StringIO, String - remote_doc = loadRemoteDocument(frame, - profile: 'http://www.w3.org/ns/json-ld#frame', - requestProfile: 'http://www.w3.org/ns/json-ld#frame', - **options) - if remote_doc.document.is_a?(String) - mj_opts = options.keep_if {|k,v| k != :adapter || MUTLI_JSON_ADAPTERS.include?(v)} - MultiJson.load(remote_doc.document, **mj_opts) + # Expand input to simplify processing + expanded_input = if expanded + input else - remote_doc.document + API.expand(input, ordered: false, **options) do |res, base_iri| + options[:base] ||= RDF::URI(base_iri) if base_iri && options[:compactToRelative] + res + end end - end - # Expand input to simplify processing - expanded_input = expanded ? input : API.expand(input, ordered: false, **options) do |res, base_iri| - options[:base] ||= RDF::URI(base_iri) if base_iri && options[:compactToRelative] - res - end + # Expand frame to simplify processing + expanded_frame = API.expand(frame, framing: true, ordered: false, **options) - # Expand frame to simplify processing - expanded_frame = API.expand(frame, framing: true, ordered: false, **options) + # Initialize input using frame as context + API.new(expanded_input, frame['@context'], no_default_base: true, **options) do + # log_debug(".frame") {"expanded input: #{expanded_input.to_json(JSON_STATE) rescue 'malformed json'}"} + # log_debug(".frame") {"expanded frame: #{expanded_frame.to_json(JSON_STATE) rescue 'malformed json'}"} - # Initialize input using frame as context - API.new(expanded_input, frame['@context'], no_default_base: true, **options) do - # log_debug(".frame") {"expanded input: #{expanded_input.to_json(JSON_STATE) rescue 'malformed json'}"} - # log_debug(".frame") {"expanded frame: #{expanded_frame.to_json(JSON_STATE) rescue 'malformed json'}"} + if %w[@first @last].include?(options[:embed]) && context.processingMode('json-ld-1.1') + if @options[:validate] + raise 
JSON::LD::JsonLdError::InvalidEmbedValue, + "#{options[:embed]} is not a valid value of @embed in 1.1 mode" + end - if %w(@first @last).include?(options[:embed]) && context.processingMode('json-ld-1.1') - raise JSON::LD::JsonLdError::InvalidEmbedValue, "#{options[:embed]} is not a valid value of @embed in 1.1 mode" if @options[:validate] - warn "[DEPRECATION] #{options[:embed]} is not a valid value of @embed in 1.1 mode.\n" - end + warn "[DEPRECATION] #{options[:embed]} is not a valid value of @embed in 1.1 mode.\n" + end - # Set omitGraph option, if not present, based on processingMode - unless options.key?(:omitGraph) - options[:omitGraph] = context.processingMode('json-ld-1.1') - end + # Set omitGraph option, if not present, based on processingMode + options[:omitGraph] = context.processingMode('json-ld-1.1') unless options.key?(:omitGraph) - # Rename blank nodes recusively. Note that this does not create new blank node identifiers where none exist, which is performed in the node map generation algorithm. - @value = rename_bnodes(@value) + # Rename blank nodes recursively. Note that this does not create new blank node identifiers where none exist, which is performed in the node map generation algorithm. + @value = rename_bnodes(@value) - # Get framing nodes from expanded input, replacing Blank Node identifiers as necessary - create_node_map(value, framing_state[:graphMap], active_graph: '@default') + # Get framing nodes from expanded input, replacing Blank Node identifiers as necessary + create_node_map(value, framing_state[:graphMap], active_graph: '@default') - frame_keys = frame.keys.map {|k| context.expand_iri(k, vocab: true)} - if frame_keys.include?('@graph') - # If frame contains @graph, it matches the default graph. - framing_state[:graph] = '@default' - else - # If frame does not contain @graph used the merged graph. 
- framing_state[:graph] = '@merged' - framing_state[:link]['@merged'] = {} - framing_state[:graphMap]['@merged'] = merge_node_map_graphs(framing_state[:graphMap]) - end + frame_keys = frame.keys.map { |k| context.expand_iri(k, vocab: true) } + if frame_keys.include?('@graph') + # If frame contains @graph, it matches the default graph. + framing_state[:graph] = '@default' + else + # If frame does not contain @graph, use the merged graph. + framing_state[:graph] = '@merged' + framing_state[:link]['@merged'] = {} + framing_state[:graphMap]['@merged'] = merge_node_map_graphs(framing_state[:graphMap]) + end - framing_state[:subjects] = framing_state[:graphMap][framing_state[:graph]] + framing_state[:subjects] = framing_state[:graphMap][framing_state[:graph]] - result = [] - frame(framing_state, framing_state[:subjects].keys.opt_sort(ordered: @options[:ordered]), (expanded_frame.first || {}), parent: result, **options) + result = [] + frame(framing_state, framing_state[:subjects].keys.opt_sort(ordered: @options[:ordered]), + (expanded_frame.first || {}), parent: result, **options) - # Default to based on processinMode - if !options.key?(:pruneBlankNodeIdentifiers) - options[:pruneBlankNodeIdentifiers] = context.processingMode('json-ld-1.1') - end + # Default based on processingMode + unless options.key?(:pruneBlankNodeIdentifiers) + options[:pruneBlankNodeIdentifiers] = context.processingMode('json-ld-1.1') + end - # Count blank node identifiers used in the document, if pruning - if options[:pruneBlankNodeIdentifiers] - bnodes_to_clear = count_blank_node_identifiers(result).collect {|k, v| k if v == 1}.compact - result = prune_bnodes(result, bnodes_to_clear) - end + # Count blank node identifiers used in the document, if pruning + if options[:pruneBlankNodeIdentifiers] + bnodes_to_clear = count_blank_node_identifiers(result).collect { |k, v| k if v == 1 }.compact + result = prune_bnodes(result, bnodes_to_clear) + end - # Replace values with `@preserve` with the content 
of its entry. - result = cleanup_preserve(result) - # log_debug(".frame") {"expanded result: #{result.to_json(JSON_STATE) rescue 'malformed json'}"} + # Replace values with `@preserve` with the content of its entry. + result = cleanup_preserve(result) + # log_debug(".frame") {"expanded result: #{result.to_json(JSON_STATE) rescue 'malformed json'}"} - # Compact result - compacted = compact(result) + # Compact result + compacted = compact(result) - # @replace `@null` with nil, compacting arrays - compacted = cleanup_null(compacted) - compacted = [compacted] unless options[:omitGraph] || compacted.is_a?(Array) + # @replace `@null` with nil, compacting arrays + compacted = cleanup_null(compacted) + compacted = [compacted] unless options[:omitGraph] || compacted.is_a?(Array) - # Add the given context to the output - result = if !compacted.is_a?(Array) - compacted - else - kwgraph = context.compact_iri('@graph', vocab: true) - {kwgraph => compacted} + # Add the given context to the output + result = if compacted.is_a?(Array) + kwgraph = context.compact_iri('@graph', vocab: true) + { kwgraph => compacted } + else + compacted + end + # Only add context if one was provided + result = context.serialize(provided_context: frame).merge(result) if frame['@context'] + + # log_debug(".frame") {"after compact: #{result.to_json(JSON_STATE) rescue 'malformed json'}"} + result end - # Only add context if one was provided - result = context.serialize(provided_context: frame).merge(result) if frame['@context'] - - # log_debug(".frame") {"after compact: #{result.to_json(JSON_STATE) rescue 'malformed json'}"} - result - end - result = serializer.call(result, **options) if serializer - block_given? ? yield(result) : result - end + result = serializer.call(result, **options) if serializer + block_given? ? yield(result) : result + end - ## - # Processes the input according to the RDF Conversion Algorithm, calling the provided callback for each triple generated. 
- # - # @param [String, #read, Hash, Array] input - # The JSON-LD object to process when outputting statements. - # @param [Boolean] expanded (false) Input is already expanded - # @option options (see #initialize) - # @option options [Boolean] :produceGeneralizedRdf (false) - # If true, output will include statements having blank node predicates, otherwise they are dropped. - # @raise [JsonLdError] - # @yield statement - # @yieldparam [RDF::Statement] statement - # @return [RDF::Enumerable] set of statements, unless a block is given. - def self.toRdf(input, expanded: false, **options, &block) - unless block_given? - results = [] - results.extend(RDF::Enumerable) - self.toRdf(input, expanded: expanded, **options) do |stmt| - results << stmt + ## + # Processes the input according to the RDF Conversion Algorithm, calling the provided callback for each triple generated. + # + # @param [String, #read, Hash, Array] input + # The JSON-LD object to process when outputting statements. + # @param [Boolean] expanded (false) Input is already expanded + # @option options (see #initialize) + # @option options [Boolean] :produceGeneralizedRdf (false) + # If true, output will include statements having blank node predicates, otherwise they are dropped. + # @raise [JsonLdError] + # @yield statement + # @yieldparam [RDF::Statement] statement + # @return [RDF::Enumerable] set of statements, unless a block is given. + def self.toRdf(input, expanded: false, **options) + unless block_given? 
+ results = [] + results.extend(RDF::Enumerable) + toRdf(input, expanded: expanded, **options) do |stmt| + results << stmt + end + return results end - return results - end - options = { - extractAllScripts: true, - }.merge(options) + options = { + extractAllScripts: true + }.merge(options) - # Flatten input to simplify processing - flattened_input = API.flatten(input, nil, expanded: expanded, ordered: false, **options) + # Flatten input to simplify processing + flattened_input = API.flatten(input, nil, expanded: expanded, ordered: false, **options) - API.new(flattened_input, nil, **options) do - # 1) Perform the Expansion Algorithm on the JSON-LD input. - # This removes any existing context to allow the given context to be cleanly applied. - # log_debug(".toRdf") {"flattened input: #{flattened_input.to_json(JSON_STATE) rescue 'malformed json'}"} + API.new(flattened_input, nil, **options) do + # 1) Perform the Expansion Algorithm on the JSON-LD input. + # This removes any existing context to allow the given context to be cleanly applied. + # log_debug(".toRdf") {"flattened input: #{flattened_input.to_json(JSON_STATE) rescue 'malformed json'}"} - # Recurse through input - flattened_input.each do |node| - item_to_rdf(node) do |statement| - next if statement.predicate.node? && !options[:produceGeneralizedRdf] + # Recurse through input + flattened_input.each do |node| + item_to_rdf(node) do |statement| + next if statement.predicate.node? && !options[:produceGeneralizedRdf] - # Drop invalid statements (other than IRIs) - unless statement.valid_extended? - # log_debug(".toRdf") {"drop invalid statement: #{statement.to_nquads}"} - next - end + # Drop invalid statements (other than IRIs) + unless statement.valid_extended? + # log_debug(".toRdf") {"drop invalid statement: #{statement.to_nquads}"} + next + end - yield statement + yield statement + end end end end - end - - ## - # Take an ordered list of RDF::Statements and turn them into a JSON-LD document. 
- # - # The resulting `Array` is either returned or yielded, if a block is given. - # - # @param [RDF::Enumerable] input - # @param [Boolean] useRdfType (false) - # If set to `true`, the JSON-LD processor will treat `rdf:type` like a normal property instead of using `@type`. - # @param [Boolean] useNativeTypes (false) use native representations - # @param [Proc] serializer (nil) - # A Serializer instance used for generating the JSON serialization of the result. If absent, the internal Ruby objects are returned, which can be transformed to JSON externally via `#to_json`. - # See {JSON::LD::API.serializer}. - # @param [Hash{Symbol => Object}] options - # @option options (see #initialize) - # @yield jsonld - # @yieldparam [Hash] jsonld - # The JSON-LD document in expanded form - # @yieldreturn [Object] returned object - # @return [Object, Hash] - # If a block is given, the result of evaluating the block is returned, otherwise, the expanded JSON-LD document - def self.fromRdf(input, useRdfType: false, useNativeTypes: false, serializer: nil, **options, &block) - result = nil - - API.new(nil, nil, **options) do - result = from_statements(input, - extendedRepresentation: options[:extendedRepresentation], - useRdfType: useRdfType, - useNativeTypes: useNativeTypes) - end - result = serializer.call(result, **options) if serializer - block_given? ? yield(result) : result - end + ## + # Take an ordered list of RDF::Statements and turn them into a JSON-LD document. + # + # The resulting `Array` is either returned or yielded, if a block is given. + # + # @param [RDF::Enumerable] input + # @param [Boolean] useRdfType (false) + # If set to `true`, the JSON-LD processor will treat `rdf:type` like a normal property instead of using `@type`. + # @param [Boolean] useNativeTypes (false) use native representations + # @param [Proc] serializer (nil) + # A Serializer instance used for generating the JSON serialization of the result. 
If absent, the internal Ruby objects are returned, which can be transformed to JSON externally via `#to_json`. + # See {JSON::LD::API.serializer}. + # @param [Hash{Symbol => Object}] options + # @option options (see #initialize) + # @yield jsonld + # @yieldparam [Hash] jsonld + # The JSON-LD document in expanded form + # @yieldreturn [Object] returned object + # @return [Object, Hash] + # If a block is given, the result of evaluating the block is returned, otherwise, the expanded JSON-LD document + def self.fromRdf(input, useRdfType: false, useNativeTypes: false, serializer: nil, **options) + result = nil + + API.new(nil, nil, **options) do + result = from_statements(input, + extendedRepresentation: options[:extendedRepresentation], + useRdfType: useRdfType, + useNativeTypes: useNativeTypes) + end - ## - # Uses built-in or provided documentLoader to retrieve a parsed document. - # - # @param [RDF::URI, String] url - # @param [Regexp] allowed_content_types - # A regular expression matching other content types allowed - # beyond types for JSON and HTML. - # @param [String, RDF::URI] base - # Location to use as documentUrl instead of `url`. - # @option options [Proc] :documentLoader - # The callback of the loader to be used to retrieve remote documents and contexts. - # @param [Boolean] extractAllScripts - # If set to `true`, when extracting JSON-LD script elements from HTML, unless a specific fragment identifier is targeted, extracts all encountered JSON-LD script elements using an array form, if necessary. - # @param [String] profile - # When the resulting `contentType` is `text/html` or `application/xhtml+xml`, this option determines the profile to use for selecting a JSON-LD script elements. - # @param [String] requestProfile - # One or more IRIs to use in the request as a profile parameter. 
- # @param [Boolean] validate (false) - # Allow only appropriate content types - # @param [Hash Object>] options - # @yield remote_document - # @yieldparam [RemoteDocumentRemoteDocument, RDF::Util::File::RemoteDocument] remote_document - # @yieldreturn [Object] returned object - # @return [Object, RemoteDocument] - # If a block is given, the result of evaluating the block is returned, otherwise, the retrieved remote document and context information unless block given - # @raise [JsonLdError] - def self.loadRemoteDocument(url, - allowed_content_types: nil, - base: nil, - documentLoader: nil, - extractAllScripts: false, - profile: nil, - requestProfile: nil, - validate: false, - **options) - documentLoader ||= self.method(:documentLoader) - options = OPEN_OPTS.merge(options) - if requestProfile - # Add any request profile - options[:headers]['Accept'] = options[:headers]['Accept'].sub('application/ld+json,', "application/ld+json;profile=#{requestProfile}, application/ld+json;q=0.9,") + result = serializer.call(result, **options) if serializer + block_given? ? yield(result) : result end - documentLoader.call(url, **options) do |remote_doc| - case remote_doc - when RDF::Util::File::RemoteDocument - # Convert to RemoteDocument - context_url = if remote_doc.content_type != 'application/ld+json' && - (remote_doc.content_type == 'application/json' || - remote_doc.content_type.to_s.match?(%r(application/\w+\+json))) - # Get context link(s) - # Note, we can't simply use #find_link, as we need to detect multiple - links = remote_doc.links.links.select do |link| - link.attr_pairs.include?(LINK_REL_CONTEXT) + + ## + # Uses built-in or provided documentLoader to retrieve a parsed document. + # + # @param [RDF::URI, String] url + # @param [Regexp] allowed_content_types + # A regular expression matching other content types allowed + # beyond types for JSON and HTML. + # @param [String, RDF::URI] base + # Location to use as documentUrl instead of `url`. 
+ # @option options [Proc] :documentLoader + # The callback of the loader to be used to retrieve remote documents and contexts. + # @param [Boolean] extractAllScripts + # If set to `true`, when extracting JSON-LD script elements from HTML, unless a specific fragment identifier is targeted, extracts all encountered JSON-LD script elements using an array form, if necessary. + # @param [String] profile + # When the resulting `contentType` is `text/html` or `application/xhtml+xml`, this option determines the profile to use for selecting a JSON-LD script elements. + # @param [String] requestProfile + # One or more IRIs to use in the request as a profile parameter. + # @param [Boolean] validate (false) + # Allow only appropriate content types + # @param [Hash Object>] options + # @yield remote_document + # @yieldparam [RemoteDocumentRemoteDocument, RDF::Util::File::RemoteDocument] remote_document + # @yieldreturn [Object] returned object + # @return [Object, RemoteDocument] + # If a block is given, the result of evaluating the block is returned, otherwise, the retrieved remote document and context information unless block given + # @raise [JsonLdError] + def self.loadRemoteDocument(url, + allowed_content_types: nil, + base: nil, + documentLoader: nil, + extractAllScripts: false, + profile: nil, + requestProfile: nil, + validate: false, + **options) + documentLoader ||= method(:documentLoader) + options = OPEN_OPTS.merge(options) + if requestProfile + # Add any request profile + options[:headers]['Accept'] = + options[:headers]['Accept'].sub('application/ld+json,', + "application/ld+json;profile=#{requestProfile}, application/ld+json;q=0.9,") + end + documentLoader.call(url, **options) do |remote_doc| + case remote_doc + when RDF::Util::File::RemoteDocument + # Convert to RemoteDocument + context_url = if remote_doc.content_type != 'application/ld+json' && + (remote_doc.content_type == 'application/json' || + remote_doc.content_type.to_s.match?(%r{application/\w+\+json})) 
+ # Get context link(s) + # Note, we can't simply use #find_link, as we need to detect multiple + links = remote_doc.links.links.select do |link| + link.attr_pairs.include?(LINK_REL_CONTEXT) + end + if links.length > 1 + raise JSON::LD::JsonLdError::MultipleContextLinkHeaders, + "expected at most 1 Link header with rel=jsonld:context, got #{links.length}" + end + Array(links.first).first end - raise JSON::LD::JsonLdError::MultipleContextLinkHeaders, - "expected at most 1 Link header with rel=jsonld:context, got #{links.length}" if links.length > 1 - Array(links.first).first - end - # If content-type is not application/ld+json, nor any other +json and a link with rel=alternate and type='application/ld+json' is found, use that instead - alternate = !remote_doc.content_type.match?(%r(application/(\w*\+)?json)) && remote_doc.links.links.detect do |link| - link.attr_pairs.include?(LINK_REL_ALTERNATE) && - link.attr_pairs.include?(LINK_TYPE_JSONLD) - end + # If content-type is not application/ld+json, nor any other +json and a link with rel=alternate and type='application/ld+json' is found, use that instead + alternate = !remote_doc.content_type.match?(%r{application/(\w*\+)?json}) && remote_doc.links.links.detect do |link| + link.attr_pairs.include?(LINK_REL_ALTERNATE) && + link.attr_pairs.include?(LINK_TYPE_JSONLD) + end - remote_doc = if alternate - # Load alternate relative to URL - loadRemoteDocument(RDF::URI(url).join(alternate.href), + remote_doc = if alternate + # Load alternate relative to URL + loadRemoteDocument(RDF::URI(url).join(alternate.href), extractAllScripts: extractAllScripts, profile: profile, requestProfile: requestProfile, validate: validate, base: base, - **options) + **options) + else + RemoteDocument.new(remote_doc.read, + documentUrl: remote_doc.base_uri, + contentType: remote_doc.content_type, + contextUrl: context_url) + end + when RemoteDocument + # Pass through else - RemoteDocument.new(remote_doc.read, - documentUrl: remote_doc.base_uri, - 
contentType: remote_doc.content_type, - contextUrl: context_url) + raise JSON::LD::JsonLdError::LoadingDocumentFailed, + "unknown result from documentLoader: #{remote_doc.class}" end - when RemoteDocument - # Pass through - else - raise JSON::LD::JsonLdError::LoadingDocumentFailed, "unknown result from documentLoader: #{remote_doc.class}" - end - # Use specified document location - remote_doc.documentUrl = base if base - - # Parse any HTML - if remote_doc.document.is_a?(String) - remote_doc.document = case remote_doc.contentType - when 'text/html', 'application/xhtml+xml' - load_html(remote_doc.document, - url: remote_doc.documentUrl, - extractAllScripts: extractAllScripts, - profile: profile, - **options) do |base| - remote_doc.documentUrl = base + # Use specified document location + remote_doc.documentUrl = base if base + + # Parse any HTML + if remote_doc.document.is_a?(String) + remote_doc.document = case remote_doc.contentType + when 'text/html', 'application/xhtml+xml' + load_html(remote_doc.document, + url: remote_doc.documentUrl, + extractAllScripts: extractAllScripts, + profile: profile, + **options) do |base| + remote_doc.documentUrl = base + end + else + validate_input(remote_doc.document, url: remote_doc.documentUrl) if validate + mj_opts = options.keep_if { |k, v| k != :adapter || MUTLI_JSON_ADAPTERS.include?(v) } + MultiJson.load(remote_doc.document, **mj_opts) end - else - validate_input(remote_doc.document, url: remote_doc.documentUrl) if validate - mj_opts = options.keep_if {|k,v| k != :adapter || MUTLI_JSON_ADAPTERS.include?(v)} - MultiJson.load(remote_doc.document, **mj_opts) end - end - if remote_doc.contentType && validate - raise IOError, "url: #{url}, contentType: #{remote_doc.contentType}" unless - remote_doc.contentType.match?(/application\/(.+\+)?json|text\/html|application\/xhtml\+xml/) || - (allowed_content_types && remote_doc.contentType.match?(allowed_content_types)) + if remote_doc.contentType && validate && 
!(remote_doc.contentType.match?(%r{application/(.+\+)?json|text/html|application/xhtml\+xml}) || + (allowed_content_types && remote_doc.contentType.match?(allowed_content_types))) + raise IOError, "url: #{url}, contentType: #{remote_doc.contentType}" + end + + block_given? ? yield(remote_doc) : remote_doc end - block_given? ? yield(remote_doc) : remote_doc + rescue IOError, MultiJson::ParseError => e + raise JSON::LD::JsonLdError::LoadingDocumentFailed, e.message end - rescue IOError, MultiJson::ParseError => e - raise JSON::LD::JsonLdError::LoadingDocumentFailed, e.message - end - ## - # Default document loader. - # @param [RDF::URI, String] url - # @param [Boolean] extractAllScripts - # If set to `true`, when extracting JSON-LD script elements from HTML, unless a specific fragment identifier is targeted, extracts all encountered JSON-LD script elements using an array form, if necessary. - # @param [String] profile - # When the resulting `contentType` is `text/html` or `application/xhtml+xml`, this option determines the profile to use for selecting a JSON-LD script elements. - # @param [String] requestProfile - # One or more IRIs to use in the request as a profile parameter. - # @param [Hash Object>] options - # @yield remote_document - # @yieldparam [RemoteDocument, RDF::Util::File::RemoteDocument] remote_document - # @raise [IOError] - def self.documentLoader(url, extractAllScripts: false, profile: nil, requestProfile: nil, **options, &block) - case url - when IO, StringIO - base_uri = options[:base] - base_uri ||= url.base_uri if url.respond_to?(:base_uri) - content_type = options[:content_type] - content_type ||= url.content_type if url.respond_to?(:content_type) - context_url = if url.respond_to?(:links) && url.links && - (content_type == 'application/json' || content_type.match?(%r(application/(^ld)+json))) - link = url.links.find_link(LINK_REL_CONTEXT) - link.href if link - end + ## + # Default document loader. 
+ # @param [RDF::URI, String] url + # @param [Boolean] extractAllScripts + # If set to `true`, when extracting JSON-LD script elements from HTML, unless a specific fragment identifier is targeted, extracts all encountered JSON-LD script elements using an array form, if necessary. + # @param [String] profile + # When the resulting `contentType` is `text/html` or `application/xhtml+xml`, this option determines the profile to use for selecting a JSON-LD script elements. + # @param [String] requestProfile + # One or more IRIs to use in the request as a profile parameter. + # @param [Hash Object>] options + # @yield remote_document + # @yieldparam [RemoteDocument, RDF::Util::File::RemoteDocument] remote_document + # @raise [IOError] + def self.documentLoader(url, extractAllScripts: false, profile: nil, requestProfile: nil, **options, &block) + case url + when IO, StringIO + base_uri = options[:base] + base_uri ||= url.base_uri if url.respond_to?(:base_uri) + content_type = options[:content_type] + content_type ||= url.content_type if url.respond_to?(:content_type) + context_url = if url.respond_to?(:links) && url.links && + (content_type == 'application/json' || content_type.match?(%r{application/(^ld)+json})) + link = url.links.find_link(LINK_REL_CONTEXT) + link&.href + end - block.call(RemoteDocument.new(url.read, - documentUrl: base_uri, - contentType: content_type, - contextUrl: context_url)) - else - RDF::Util::File.open_file(url, **options, &block) + yield(RemoteDocument.new(url.read, + documentUrl: base_uri, + contentType: content_type, + contextUrl: context_url)) + else + RDF::Util::File.open_file(url, **options, &block) + end end - end - # Add class method aliases for backwards compatibility - class << self - alias :toRDF :toRdf - alias :fromRDF :fromRdf - end + # Add class method aliases for backwards compatibility + class << self + alias toRDF toRdf + alias fromRDF fromRdf + end - ## - # Load one or more script tags from an HTML source. 
- # Unescapes and uncomments input, returns the internal representation - # Yields document base - # @param [String] input - # @param [String] url Original URL - # @param [:nokogiri, :rexml] library (nil) - # @param [Boolean] extractAllScripts (false) - # @param [Boolean] profile (nil) Optional priortized profile when loading a single script by type. - # @param [Hash{Symbol => Object}] options - def self.load_html(input, url:, + ## + # Load one or more script tags from an HTML source. + # Unescapes and uncomments input, returns the internal representation + # Yields document base + # @param [String] input + # @param [String] url Original URL + # @param [:nokogiri, :rexml] library (nil) + # @param [Boolean] extractAllScripts (false) + # @param [Boolean] profile (nil) Optional priortized profile when loading a single script by type. + # @param [Hash{Symbol => Object}] options + def self.load_html(input, url:, library: nil, extractAllScripts: false, profile: nil, **options) - if input.is_a?(String) - library ||= begin - require 'nokogiri' - :nokogiri - rescue LoadError - :rexml - end - require "json/ld/html/#{library}" + if input.is_a?(String) + library ||= begin + require 'nokogiri' + :nokogiri + rescue LoadError + :rexml + end + require "json/ld/html/#{library}" - # Parse HTML using the appropriate library - implementation = case library - when :nokogiri then Nokogiri - when :rexml then REXML - end - self.extend(implementation) + # Parse HTML using the appropriate library + implementation = case library + when :nokogiri then Nokogiri + when :rexml then REXML + end + extend(implementation) - input = begin - self.send("initialize_html_#{library}".to_sym, input, **options) - rescue - raise JSON::LD::JsonLdError::LoadingDocumentFailed, "Malformed HTML document: #{$!.message}" - end + input = begin + send("initialize_html_#{library}".to_sym, input, **options) + rescue StandardError + raise JSON::LD::JsonLdError::LoadingDocumentFailed, "Malformed HTML document: 
#{$ERROR_INFO.message}" + end - # Potentially update options[:base] - if html_base = input.at_xpath("/html/head/base/@href") - base = RDF::URI(url) if url - html_base = RDF::URI(html_base) - html_base = base.join(html_base) if base - yield html_base + # Potentially update options[:base] + if (html_base = input.at_xpath("/html/head/base/@href")) + base = RDF::URI(url) if url + html_base = RDF::URI(html_base) + html_base = base.join(html_base) if base + yield html_base + end end - end - url = RDF::URI.parse(url) - if url.fragment - id = CGI.unescape(url.fragment) - # Find script with an ID based on that fragment. - element = input.at_xpath("//script[@id='#{id}']") - raise JSON::LD::JsonLdError::LoadingDocumentFailed, "No script tag found with id=#{id}" unless element - raise JSON::LD::JsonLdError::LoadingDocumentFailed, "Script tag has type=#{element.attributes['type']}" unless element.attributes['type'].to_s.start_with?('application/ld+json') - content = element.inner_html - validate_input(content, url: url) if options[:validate] - mj_opts = options.keep_if {|k,v| k != :adapter || MUTLI_JSON_ADAPTERS.include?(v)} - MultiJson.load(content, **mj_opts) - elsif extractAllScripts - res = [] - elements = if profile - es = input.xpath("//script[starts-with(@type, 'application/ld+json;profile=#{profile}')]") - # If no profile script, just take a single script without profile - es = [input.at_xpath("//script[starts-with(@type, 'application/ld+json')]")].compact if es.empty? - es - else - input.xpath("//script[starts-with(@type, 'application/ld+json')]") - end - elements.each do |element| + url = RDF::URI.parse(url) + if url.fragment + id = CGI.unescape(url.fragment) + # Find script with an ID based on that fragment. 
+ element = input.at_xpath("//script[@id='#{id}']") + raise JSON::LD::JsonLdError::LoadingDocumentFailed, "No script tag found with id=#{id}" unless element + + unless element.attributes['type'].to_s.start_with?('application/ld+json') + raise JSON::LD::JsonLdError::LoadingDocumentFailed, + "Script tag has type=#{element.attributes['type']}" + end + content = element.inner_html validate_input(content, url: url) if options[:validate] - mj_opts = options.keep_if {|k,v| k != :adapter || MUTLI_JSON_ADAPTERS.include?(v)} - r = MultiJson.load(content, **mj_opts) - if r.is_a?(Hash) - res << r - elsif r.is_a?(Array) - res = res.concat(r) + mj_opts = options.keep_if { |k, v| k != :adapter || MUTLI_JSON_ADAPTERS.include?(v) } + MultiJson.load(content, **mj_opts) + elsif extractAllScripts + res = [] + elements = if profile + es = input.xpath("//script[starts-with(@type, 'application/ld+json;profile=#{profile}')]") + # If no profile script, just take a single script without profile + es = [input.at_xpath("//script[starts-with(@type, 'application/ld+json')]")].compact if es.empty? + es + else + input.xpath("//script[starts-with(@type, 'application/ld+json')]") + end + elements.each do |element| + content = element.inner_html + validate_input(content, url: url) if options[:validate] + mj_opts = options.keep_if { |k, v| k != :adapter || MUTLI_JSON_ADAPTERS.include?(v) } + r = MultiJson.load(content, **mj_opts) + if r.is_a?(Hash) + res << r + elsif r.is_a?(Array) + res.concat(r) + end end + res + else + # Find the first script with type application/ld+json. 
+ element = input.at_xpath("//script[starts-with(@type, 'application/ld+json;profile=#{profile}')]") if profile + element ||= input.at_xpath("//script[starts-with(@type, 'application/ld+json')]") + raise JSON::LD::JsonLdError::LoadingDocumentFailed, "No script tag found" unless element + + content = element.inner_html + validate_input(content, url: url) if options[:validate] + mj_opts = options.keep_if { |k, v| k != :adapter || MUTLI_JSON_ADAPTERS.include?(v) } + MultiJson.load(content, **mj_opts) end - res - else - # Find the first script with type application/ld+json. - element = input.at_xpath("//script[starts-with(@type, 'application/ld+json;profile=#{profile}')]") if profile - element ||= input.at_xpath("//script[starts-with(@type, 'application/ld+json')]") - raise JSON::LD::JsonLdError::LoadingDocumentFailed, "No script tag found" unless element - content = element.inner_html - validate_input(content, url: url) if options[:validate] - mj_opts = options.keep_if {|k,v| k != :adapter || MUTLI_JSON_ADAPTERS.include?(v)} - MultiJson.load(content, **mj_opts) + rescue MultiJson::ParseError => e + raise JSON::LD::JsonLdError::InvalidScriptElement, e.message end - rescue MultiJson::ParseError => e - raise JSON::LD::JsonLdError::InvalidScriptElement, e.message - end - ## - # The default serializer for serialzing Ruby Objects to JSON. - # - # Defaults to `MultiJson.dump` - # - # @param [Object] object - # @param [Array] args - # other arguments that may be passed for some specific implementation. - # @param [Hash] options - # options passed from the invoking context. - # @option options [Object] :serializer_opts (JSON_STATE) - def self.serializer(object, *args, **options) - serializer_opts = options.fetch(:serializer_opts, JSON_STATE) - MultiJson.dump(object, serializer_opts) - end + ## + # The default serializer for serialzing Ruby Objects to JSON. 
+ # + # Defaults to `MultiJson.dump` + # + # @param [Object] object + # @param [Array] args + # other arguments that may be passed for some specific implementation. + # @param [Hash] options + # options passed from the invoking context. + # @option options [Object] :serializer_opts (JSON_STATE) + def self.serializer(object, *_args, **options) + serializer_opts = options.fetch(:serializer_opts, JSON_STATE) + MultiJson.dump(object, serializer_opts) + end - ## - # Validate JSON using JsonLint, if loaded - private - def self.validate_input(input, url:) - return unless defined?(JsonLint) - jsonlint = JsonLint::Linter.new - input = StringIO.new(input) unless input.respond_to?(:read) - unless jsonlint.check_stream(input) - raise JsonLdError::LoadingDocumentFailed, "url: #{url}\n" + jsonlint.errors[''].join("\n") + ## + # Validate JSON using JsonLint, if loaded + + def self.validate_input(input, url:) + return unless defined?(JsonLint) + + jsonlint = JsonLint::Linter.new + input = StringIO.new(input) unless input.respond_to?(:read) + unless jsonlint.check_stream(input) + raise JsonLdError::LoadingDocumentFailed, "url: #{url}\n" + jsonlint.errors[''].join("\n") + end + + input.rewind end - input.rewind - end - ## - # A {RemoteDocument} is returned from a {documentLoader}. - class RemoteDocument - # The final URL of the loaded document. This is important to handle HTTP redirects properly. - # @return [String] - attr_accessor :documentUrl - - # The Content-Type of the loaded document, exclusive of any optional parameters. - # @return [String] - attr_reader :contentType - - # @return [String] - # The URL of a remote context as specified by an HTTP Link header with rel=`http://www.w3.org/ns/json-ld#context` - attr_accessor :contextUrl - - # The parsed retrieved document. - # @return [Array, Hash] - attr_accessor :document - - # The value of any profile parameter retrieved as part of the original contentType. 
- # @return [String] - attr_accessor :profile - - # @param [RDF::Util::File::RemoteDocument] document - # @param [String] documentUrl - # The final URL of the loaded document. This is important to handle HTTP redirects properly. - # @param [String] contentType - # The Content-Type of the loaded document, exclusive of any optional parameters. - # @param [String] contextUrl - # The URL of a remote context as specified by an HTTP Link header with rel=`http://www.w3.org/ns/json-ld#context` - # @param [String] profile - # The value of any profile parameter retrieved as part of the original contentType. - # @option options [Hash{Symbol => Object}] options - def initialize(document, documentUrl: nil, contentType: nil, contextUrl: nil, profile: nil, **options) - @document = document - @documentUrl = documentUrl || options[:base_uri] - @contentType = contentType || options[:content_type] - @contextUrl = contextUrl - @profile = profile + ## + # A {RemoteDocument} is returned from a {documentLoader}. + class RemoteDocument + # The final URL of the loaded document. This is important to handle HTTP redirects properly. + # @return [String] + attr_accessor :documentUrl + + # The Content-Type of the loaded document, exclusive of any optional parameters. + # @return [String] + attr_reader :contentType + + # @return [String] + # The URL of a remote context as specified by an HTTP Link header with rel=`http://www.w3.org/ns/json-ld#context` + attr_accessor :contextUrl + + # The parsed retrieved document. + # @return [Array, Hash] + attr_accessor :document + + # The value of any profile parameter retrieved as part of the original contentType. + # @return [String] + attr_accessor :profile + + # @param [RDF::Util::File::RemoteDocument] document + # @param [String] documentUrl + # The final URL of the loaded document. This is important to handle HTTP redirects properly. + # @param [String] contentType + # The Content-Type of the loaded document, exclusive of any optional parameters. 
+ # @param [String] contextUrl + # The URL of a remote context as specified by an HTTP Link header with rel=`http://www.w3.org/ns/json-ld#context` + # @param [String] profile + # The value of any profile parameter retrieved as part of the original contentType. + # @option options [Hash{Symbol => Object}] options + def initialize(document, documentUrl: nil, contentType: nil, contextUrl: nil, profile: nil, **options) + @document = document + @documentUrl = documentUrl || options[:base_uri] + @contentType = contentType || options[:content_type] + @contextUrl = contextUrl + @profile = profile + end end end end end - diff --git a/lib/json/ld/compact.rb b/lib/json/ld/compact.rb index 0ec5551e..1b80a660 100644 --- a/lib/json/ld/compact.rb +++ b/lib/json/ld/compact.rb @@ -1,356 +1,356 @@ -# -*- encoding: utf-8 -*- # frozen_string_literal: true -module JSON::LD - module Compact - include Utils - - # The following constant is used to reduce object allocations in #compact below - CONTAINER_MAPPING_LANGUAGE_INDEX_ID_TYPE = Set.new(%w(@language @index @id @type)).freeze - EXPANDED_PROPERTY_DIRECTION_INDEX_LANGUAGE_VALUE = %w(@direction @index @language @value).freeze - - ## - # This algorithm compacts a JSON-LD document, such that the given context is applied. This must result in shortening any applicable IRIs to terms or compact IRIs, any applicable keywords to keyword aliases, and any applicable JSON-LD values expressed in expanded form to simple values such as strings or numbers. - # - # @param [Array, Hash] element - # @param [String, RDF::URI] base (nil) - # Ensure output objects have keys ordered properly - # @param [String] property (nil) - # Extra validatation - # @return [Array, Hash] - def compact(element, - base: nil, - property: nil, - log_depth: nil) - # log_debug("compact", depth: log_depth.to_i) {"element: #{element.inspect}, ec: #{context.inspect}"} - - # If the term definition for active property itself contains a context, use that for compacting values. 
- input_context = self.context - - case element - when Array - # log_debug("") {"Array #{element.inspect}"} - result = element.map do |item| - compact(item, base: base, property: property, log_depth: log_depth.to_i + 1) - end.compact - - # If element has a single member and the active property has no - # @container mapping to @list or @set, the compacted value is that - # member; otherwise the compacted value is element - if result.length == 1 && - !context.as_array?(property) && @options[:compactArrays] - # log_debug("=> extract single element", depth: log_depth.to_i) {result.first.inspect} - result.first - else - # log_debug("=> array result", depth: log_depth.to_i) {result.inspect} - result - end - when Hash - # Otherwise element is a JSON object. - # @null objects are used in framing - return nil if element.key?('@null') +module JSON + module LD + module Compact + include Utils + + # The following constant is used to reduce object allocations in #compact below + CONTAINER_MAPPING_LANGUAGE_INDEX_ID_TYPE = Set.new(%w[@language @index @id @type]).freeze + EXPANDED_PROPERTY_DIRECTION_INDEX_LANGUAGE_VALUE = %w[@direction @index @language @value].freeze + + ## + # This algorithm compacts a JSON-LD document, such that the given context is applied. This must result in shortening any applicable IRIs to terms or compact IRIs, any applicable keywords to keyword aliases, and any applicable JSON-LD values expressed in expanded form to simple values such as strings or numbers. + # + # @param [Array, Hash] element + # @param [String, RDF::URI] base (nil) + # Ensure output objects have keys ordered properly + # @param [String] property (nil) + # Extra validatation + # @return [Array, Hash] + def compact(element, + base: nil, + property: nil, + log_depth: nil) + # log_debug("compact", depth: log_depth.to_i) {"element: #{element.inspect}, ec: #{context.inspect}"} + + # If the term definition for active property itself contains a context, use that for compacting values. 
+ input_context = context + + case element + when Array + # log_debug("") {"Array #{element.inspect}"} + result = element.map do |item| + compact(item, base: base, property: property, log_depth: log_depth.to_i + 1) + end.compact + + # If element has a single member and the active property has no + # @container mapping to @list or @set, the compacted value is that + # member; otherwise the compacted value is element + if result.length == 1 && + !context.as_array?(property) && @options[:compactArrays] + # log_debug("=> extract single element", depth: log_depth.to_i) {result.first.inspect} + result.first + else + # log_debug("=> array result", depth: log_depth.to_i) {result.inspect} + result + end + when Hash + # Otherwise element is a JSON object. - # Revert any previously type-scoped (non-preserved) context - if context.previous_context && !element.key?('@value') && element.keys != %w(@id) - # log_debug("revert ec", depth: log_depth.to_i) {"previous context: #{context.previous_context.inspect}"} - self.context = context.previous_context - end + # @null objects are used in framing + return nil if element.key?('@null') - # Look up term definintions from property using the original type-scoped context, if it exists, but apply them to the now current previous context - td = input_context.term_definitions[property] if property - if td && !td.context.nil? 
- self.context = context.parse(td.context, - override_protected: true) - # log_debug("prop-scoped", depth: log_depth.to_i) {"context: #{self.context.inspect}"} - end + # Revert any previously type-scoped (non-preserved) context + if context.previous_context && !element.key?('@value') && element.keys != %w[@id] + # log_debug("revert ec", depth: log_depth.to_i) {"previous context: #{context.previous_context.inspect}"} + self.context = context.previous_context + end - if (element.key?('@id') || element.key?('@value')) && !element.key?('@annotation') - result = context.compact_value(property, element, base: @options[:base]) - if !result.is_a?(Hash) || context.coerce(property) == '@json' - # log_debug("", depth: log_depth.to_i) {"=> scalar result: #{result.inspect}"} - return result + # Look up term definintions from property using the original type-scoped context, if it exists, but apply them to the now current previous context + td = input_context.term_definitions[property] if property + if td && !td.context.nil? 
+ self.context = context.parse(td.context, + override_protected: true) + # log_debug("prop-scoped", depth: log_depth.to_i) {"context: #{self.context.inspect}"} end - end - # If expanded property is @list and we're contained within a list container, recursively compact this item to an array - if list?(element) && context.container(property).include?('@list') - return compact(element['@list'], base: base, - property: property, - log_depth: log_depth.to_i + 1) - end + if (element.key?('@id') || element.key?('@value')) && !element.key?('@annotation') + result = context.compact_value(property, element, base: @options[:base]) + if !result.is_a?(Hash) || context.coerce(property) == '@json' + # log_debug("", depth: log_depth.to_i) {"=> scalar result: #{result.inspect}"} + return result + end + end - inside_reverse = property == '@reverse' - result, nest_result = {}, nil - - # Apply any context defined on an alias of @type - # If key is @type and any compacted value is a term having a local context, overlay that context. - Array(element['@type']). - map {|expanded_type| context.compact_iri(expanded_type, vocab: true)}. - sort. - each do |term| - term_context = input_context.term_definitions[term].context if input_context.term_definitions[term] - self.context = context.parse(term_context, propagate: false) unless term_context.nil? 
- # log_debug("type-scoped", depth: log_depth.to_i) {"context: #{self.context.inspect}"} - end + # If expanded property is @list and we're contained within a list container, recursively compact this item to an array + if list?(element) && context.container(property).include?('@list') + return compact(element['@list'], base: base, + property: property, + log_depth: log_depth.to_i + 1) + end - element.keys.opt_sort(ordered: @options[:ordered]).each do |expanded_property| - expanded_value = element[expanded_property] - # log_debug("", depth: log_depth.to_i) {"#{expanded_property}: #{expanded_value.inspect}"} - - if expanded_property == '@id' - compacted_value = as_array(expanded_value).map do |expanded_id| - if node?(expanded_id) && @options[:rdfstar] - # This can only really happen for valid RDF-star - compact(expanded_id, base: base, - property: '@id', - log_depth: log_depth.to_i + 1) - else - context.compact_iri(expanded_id, base: @options[:base]) + inside_reverse = property == '@reverse' + result = {} + nest_result = nil + + # Apply any context defined on an alias of @type + # If key is @type and any compacted value is a term having a local context, overlay that context. + Array(element['@type']) + .map { |expanded_type| context.compact_iri(expanded_type, vocab: true) } + .sort + .each do |term| + term_context = input_context.term_definitions[term].context if input_context.term_definitions[term] + self.context = context.parse(term_context, propagate: false) unless term_context.nil? 
+ # log_debug("type-scoped", depth: log_depth.to_i) {"context: #{self.context.inspect}"} + end + + element.keys.opt_sort(ordered: @options[:ordered]).each do |expanded_property| + expanded_value = element[expanded_property] + # log_debug("", depth: log_depth.to_i) {"#{expanded_property}: #{expanded_value.inspect}"} + + if expanded_property == '@id' + compacted_value = as_array(expanded_value).map do |expanded_id| + if node?(expanded_id) && @options[:rdfstar] + # This can only really happen for valid RDF-star + compact(expanded_id, base: base, + property: '@id', + log_depth: log_depth.to_i + 1) + else + context.compact_iri(expanded_id, base: @options[:base]) + end end + + kw_alias = context.compact_iri('@id', vocab: true) + as_array = compacted_value.length > 1 + compacted_value = compacted_value.first unless as_array + result[kw_alias] = compacted_value + next end - kw_alias = context.compact_iri('@id', vocab: true) - as_array = compacted_value.length > 1 - compacted_value = compacted_value.first unless as_array - result[kw_alias] = compacted_value - next - end + if expanded_property == '@type' + compacted_value = Array(expanded_value).map do |expanded_type| + input_context.compact_iri(expanded_type, vocab: true) + end - if expanded_property == '@type' - compacted_value = Array(expanded_value).map do |expanded_type| - input_context.compact_iri(expanded_type, vocab: true) + kw_alias = context.compact_iri('@type', vocab: true) + as_array = compacted_value.length > 1 || + (context.as_array?(kw_alias) && + !value?(element) && + context.processingMode('json-ld-1.1')) + add_value(result, kw_alias, compacted_value, property_is_array: as_array) + next end - kw_alias = context.compact_iri('@type', vocab: true) - as_array = compacted_value.length > 1 || - (context.as_array?(kw_alias) && - !value?(element) && - context.processingMode('json-ld-1.1')) - add_value(result, kw_alias, compacted_value, property_is_array: as_array) - next - end + if expanded_property == '@reverse' + 
compacted_value = compact(expanded_value, base: base, + property: '@reverse', + log_depth: log_depth.to_i + 1) + # log_debug("@reverse", depth: log_depth.to_i) {"compacted_value: #{compacted_value.inspect}"} + # handle double-reversed properties + compacted_value.each do |prop, value| + next unless context.reverse?(prop) - if expanded_property == '@reverse' - compacted_value = compact(expanded_value, base: base, - property: '@reverse', - log_depth: log_depth.to_i + 1) - # log_debug("@reverse", depth: log_depth.to_i) {"compacted_value: #{compacted_value.inspect}"} - # handle double-reversed properties - compacted_value.each do |prop, value| - if context.reverse?(prop) add_value(result, prop, value, property_is_array: context.as_array?(prop) || !@options[:compactArrays]) compacted_value.delete(prop) end - end - unless compacted_value.empty? - al = context.compact_iri('@reverse', vocab: true) - # log_debug("", depth: log_depth.to_i) {"remainder: #{al} => #{compacted_value.inspect}"} - result[al] = compacted_value + unless compacted_value.empty? + al = context.compact_iri('@reverse', vocab: true) + # log_debug("", depth: log_depth.to_i) {"remainder: #{al} => #{compacted_value.inspect}"} + result[al] = compacted_value + end + next end - next - end - if expanded_property == '@preserve' - # Compact using `property` - compacted_value = compact(expanded_value, base: base, - property: property, - log_depth: log_depth.to_i + 1) - # log_debug("@preserve", depth: log_depth.to_i) {"compacted_value: #{compacted_value.inspect}"} + if expanded_property == '@preserve' + # Compact using `property` + compacted_value = compact(expanded_value, base: base, + property: property, + log_depth: log_depth.to_i + 1) + # log_debug("@preserve", depth: log_depth.to_i) {"compacted_value: #{compacted_value.inspect}"} - unless compacted_value.is_a?(Array) && compacted_value.empty? 
- result['@preserve'] = compacted_value + result['@preserve'] = compacted_value unless compacted_value.is_a?(Array) && compacted_value.empty? + next end - next - end - - if expanded_property == '@index' && context.container(property).include?('@index') - # log_debug("@index", depth: log_depth.to_i) {"drop @index"} - next - end - - # Otherwise, if expanded property is @direction, @index, @value, or @language: - if EXPANDED_PROPERTY_DIRECTION_INDEX_LANGUAGE_VALUE.include?(expanded_property) - al = context.compact_iri(expanded_property, vocab: true) - # log_debug(expanded_property, depth: log_depth.to_i) {"#{al} => #{expanded_value.inspect}"} - result[al] = expanded_value - next - end - if expanded_value.empty? - item_active_property = - context.compact_iri(expanded_property, - value: expanded_value, - vocab: true, - reverse: inside_reverse) - - if nest_prop = context.nest(item_active_property) - result[nest_prop] ||= {} - add_value(result[nest_prop], item_active_property, [], - property_is_array: true) - else - add_value(result, item_active_property, [], - property_is_array: true) + if expanded_property == '@index' && context.container(property).include?('@index') + # log_debug("@index", depth: log_depth.to_i) {"drop @index"} + next end - end - - # At this point, expanded value must be an array due to the Expansion algorithm. 
- expanded_value.each do |expanded_item| - item_active_property = - context.compact_iri(expanded_property, - value: expanded_item, - vocab: true, - reverse: inside_reverse) + # Otherwise, if expanded property is @direction, @index, @value, or @language: + if EXPANDED_PROPERTY_DIRECTION_INDEX_LANGUAGE_VALUE.include?(expanded_property) + al = context.compact_iri(expanded_property, vocab: true) + # log_debug(expanded_property, depth: log_depth.to_i) {"#{al} => #{expanded_value.inspect}"} + result[al] = expanded_value + next + end - nest_result = if nest_prop = context.nest(item_active_property) - # FIXME??: It's possible that nest_prop will be used both for nesting, and for values of @nest - result[nest_prop] ||= {} - else - result + if expanded_value.empty? + item_active_property = + context.compact_iri(expanded_property, + value: expanded_value, + vocab: true, + reverse: inside_reverse) + + if (nest_prop = context.nest(item_active_property)) + result[nest_prop] ||= {} + add_value(result[nest_prop], item_active_property, [], + property_is_array: true) + else + add_value(result, item_active_property, [], + property_is_array: true) + end end - container = context.container(item_active_property) - as_array = !@options[:compactArrays] || context.as_array?(item_active_property) + # At this point, expanded value must be an array due to the Expansion algorithm. 
+ expanded_value.each do |expanded_item| + item_active_property = + context.compact_iri(expanded_property, + value: expanded_item, + vocab: true, + reverse: inside_reverse) + + nest_result = if (nest_prop = context.nest(item_active_property)) + # FIXME??: It's possible that nest_prop will be used both for nesting, and for values of @nest + result[nest_prop] ||= {} + else + result + end - value = case - when list?(expanded_item) then expanded_item['@list'] - when graph?(expanded_item) then expanded_item['@graph'] - else expanded_item - end + container = context.container(item_active_property) + as_array = !@options[:compactArrays] || context.as_array?(item_active_property) - compacted_item = compact(value, base: base, - property: item_active_property, - log_depth: log_depth.to_i + 1) - # log_debug("", depth: log_depth.to_i) {" => compacted key: #{item_active_property.inspect} for #{compacted_item.inspect}"} - - # handle @list - if list?(expanded_item) - compacted_item = as_array(compacted_item) - unless container.include?('@list') - al = context.compact_iri('@list', vocab: true) - compacted_item = {al => compacted_item} - if expanded_item.key?('@index') - key = context.compact_iri('@index', vocab: true) - compacted_item[key] = expanded_item['@index'] - end + value = if list?(expanded_item) + expanded_item['@list'] + elsif graph?(expanded_item) + expanded_item['@graph'] else - add_value(nest_result, item_active_property, compacted_item, - value_is_array: true, allow_duplicate: true) - next + expanded_item end - end - # Graph object compaction cases: - if graph?(expanded_item) - if container.include?('@graph') && - (container.include?('@id') || container.include?('@index') && simple_graph?(expanded_item)) - # container includes @graph and @id - map_object = nest_result[item_active_property] ||= {} - # If there is no @id, create a blank node identifier to use as an index - map_key = if container.include?('@id') && expanded_item['@id'] - 
context.compact_iri(expanded_item['@id'], base: @options[:base]) - elsif container.include?('@index') && expanded_item['@index'] - context.compact_iri(expanded_item['@index'], vocab: true) + compacted_item = compact(value, base: base, + property: item_active_property, + log_depth: log_depth.to_i + 1) + # log_debug("", depth: log_depth.to_i) {" => compacted key: #{item_active_property.inspect} for #{compacted_item.inspect}"} + + # handle @list + if list?(expanded_item) + compacted_item = as_array(compacted_item) + if container.include?('@list') + add_value(nest_result, item_active_property, compacted_item, + value_is_array: true, allow_duplicate: true) + next else - context.compact_iri('@none', vocab: true) - end - add_value(map_object, map_key, compacted_item, - property_is_array: as_array) - elsif container.include?('@graph') && simple_graph?(expanded_item) - # container includes @graph but not @id or @index and value is a simple graph object - if compacted_item.is_a?(Array) && compacted_item.length > 1 - # Mutple objects in the same graph can't be represented directly, as they would be interpreted as two different graphs. Need to wrap in @included. 
- included_key = context.compact_iri('@included', vocab: true) - compacted_item = {included_key => compacted_item} - end - # Drop through, where compacted_item will be added - add_value(nest_result, item_active_property, compacted_item, - property_is_array: as_array) - else - # container does not include @graph or otherwise does not match one of the previous cases, redo compacted_item - al = context.compact_iri('@graph', vocab: true) - compacted_item = {al => compacted_item} - if expanded_item['@id'] - al = context.compact_iri('@id', vocab: true) - compacted_item[al] = context.compact_iri(expanded_item['@id'], vocab: false) - end - if expanded_item.key?('@index') - key = context.compact_iri('@index', vocab: true) - compacted_item[key] = expanded_item['@index'] + al = context.compact_iri('@list', vocab: true) + compacted_item = { al => compacted_item } + if expanded_item.key?('@index') + key = context.compact_iri('@index', vocab: true) + compacted_item[key] = expanded_item['@index'] + end end - add_value(nest_result, item_active_property, compacted_item, - property_is_array: as_array) end - elsif container.intersect?(CONTAINER_MAPPING_LANGUAGE_INDEX_ID_TYPE) && !container.include?('@graph') - map_object = nest_result[item_active_property] ||= {} - c = container.first - container_key = context.compact_iri(c, vocab: true) - compacted_item = case - when container.include?('@id') - map_key = compacted_item[container_key] - compacted_item.delete(container_key) - compacted_item - when container.include?('@index') - index_key = context.term_definitions[item_active_property].index || '@index' - if index_key == '@index' - map_key = expanded_item['@index'] - else - index_key = context.expand_iri(index_key, vocab: true) - container_key = context.compact_iri(index_key, vocab: true) - map_key, *others = Array(compacted_item[container_key]) - if map_key.is_a?(String) - case others.length - when 0 then compacted_item.delete(container_key) - when 1 then 
compacted_item[container_key] = others.first - else compacted_item[container_key] = others - end + + # Graph object compaction cases: + if graph?(expanded_item) + if container.include?('@graph') && + (container.include?('@id') || (container.include?('@index') && simple_graph?(expanded_item))) + # container includes @graph and @id + map_object = nest_result[item_active_property] ||= {} + # If there is no @id, create a blank node identifier to use as an index + map_key = if container.include?('@id') && expanded_item['@id'] + context.compact_iri(expanded_item['@id'], base: @options[:base]) + elsif container.include?('@index') && expanded_item['@index'] + context.compact_iri(expanded_item['@index'], vocab: true) else - map_key = context.compact_iri('@none', vocab: true) + context.compact_iri('@none', vocab: true) end + add_value(map_object, map_key, compacted_item, + property_is_array: as_array) + elsif container.include?('@graph') && simple_graph?(expanded_item) + # container includes @graph but not @id or @index and value is a simple graph object + if compacted_item.is_a?(Array) && compacted_item.length > 1 + # Mutple objects in the same graph can't be represented directly, as they would be interpreted as two different graphs. Need to wrap in @included. 
+ included_key = context.compact_iri('@included', vocab: true) + compacted_item = { included_key => compacted_item } + end + # Drop through, where compacted_item will be added + add_value(nest_result, item_active_property, compacted_item, + property_is_array: as_array) + else + # container does not include @graph or otherwise does not match one of the previous cases, redo compacted_item + al = context.compact_iri('@graph', vocab: true) + compacted_item = { al => compacted_item } + if expanded_item['@id'] + al = context.compact_iri('@id', vocab: true) + compacted_item[al] = context.compact_iri(expanded_item['@id'], vocab: false) + end + if expanded_item.key?('@index') + key = context.compact_iri('@index', vocab: true) + compacted_item[key] = expanded_item['@index'] + end + add_value(nest_result, item_active_property, compacted_item, + property_is_array: as_array) end - # Note, if compacted_item is a node reference and key is @id-valued, then this could be compacted further. - compacted_item - when container.include?('@language') - map_key = expanded_item['@language'] - value?(expanded_item) ? 
expanded_item['@value'] : compacted_item - when container.include?('@type') - map_key, *types = Array(compacted_item[container_key]) - case types.length - when 0 then compacted_item.delete(container_key) - when 1 then compacted_item[container_key] = types.first - else compacted_item[container_key] = types - end + elsif container.intersect?(CONTAINER_MAPPING_LANGUAGE_INDEX_ID_TYPE) && !container.include?('@graph') + map_object = nest_result[item_active_property] ||= {} + c = container.first + container_key = context.compact_iri(c, vocab: true) + compacted_item = if container.include?('@id') + map_key = compacted_item[container_key] + compacted_item.delete(container_key) + compacted_item + elsif container.include?('@index') + index_key = context.term_definitions[item_active_property].index || '@index' + if index_key == '@index' + map_key = expanded_item['@index'] + else + index_key = context.expand_iri(index_key, vocab: true) + container_key = context.compact_iri(index_key, vocab: true) + map_key, *others = Array(compacted_item[container_key]) + if map_key.is_a?(String) + case others.length + when 0 then compacted_item.delete(container_key) + when 1 then compacted_item[container_key] = others.first + else compacted_item[container_key] = others + end + else + map_key = context.compact_iri('@none', vocab: true) + end + end + # Note, if compacted_item is a node reference and key is @id-valued, then this could be compacted further. + compacted_item + elsif container.include?('@language') + map_key = expanded_item['@language'] + value?(expanded_item) ? 
expanded_item['@value'] : compacted_item + elsif container.include?('@type') + map_key, *types = Array(compacted_item[container_key]) + case types.length + when 0 then compacted_item.delete(container_key) + when 1 then compacted_item[container_key] = types.first + else compacted_item[container_key] = types + end - # if compacted_item contains a single entry who's key maps to @id, then recompact the item without @type - if compacted_item.keys.length == 1 && expanded_item.keys.include?('@id') - compacted_item = compact({'@id' => expanded_item['@id']}, - base: base, - property: item_active_property, - log_depth: log_depth.to_i + 1) + # if compacted_item contains a single entry who's key maps to @id, then recompact the item without @type + if compacted_item.keys.length == 1 && expanded_item.key?('@id') + compacted_item = compact({ '@id' => expanded_item['@id'] }, + base: base, + property: item_active_property, + log_depth: log_depth.to_i + 1) + end + compacted_item end - compacted_item + map_key ||= context.compact_iri('@none', vocab: true) + add_value(map_object, map_key, compacted_item, + property_is_array: as_array) + else + add_value(nest_result, item_active_property, compacted_item, + property_is_array: as_array) end - map_key ||= context.compact_iri('@none', vocab: true) - add_value(map_object, map_key, compacted_item, - property_is_array: as_array) - else - add_value(nest_result, item_active_property, compacted_item, - property_is_array: as_array) end end - end - result - else - # For other types, the compacted value is the element value - # log_debug("compact", depth: log_depth.to_i) {element.class.to_s} - element + result + else + # For other types, the compacted value is the element value + # log_debug("compact", depth: log_depth.to_i) {element.class.to_s} + element + end + ensure + self.context = input_context end - - ensure - self.context = input_context end end end diff --git a/lib/json/ld/conneg.rb b/lib/json/ld/conneg.rb index b243450f..a2288ae8 100644 
--- a/lib/json/ld/conneg.rb +++ b/lib/json/ld/conneg.rb @@ -1,188 +1,206 @@ -# -*- encoding: utf-8 -*- # frozen_string_literal: true + +require 'English' + require 'rack' require 'link_header' -module JSON::LD - ## - # Rack middleware for JSON-LD content negotiation. - # - # Uses HTTP Content Negotiation to serialize `Array` and `Hash` results as JSON-LD using 'profile' accept-params to invoke appropriate JSON-LD API methods. - # - # Allows black-listing and white-listing of two-part profiles where the second part denotes a URL of a _context_ or _frame_. (See {JSON::LD::Writer.accept?}) - # - # Works along with `rack-linkeddata` for serializing data which is not in the form of an `RDF::Repository`. - # - # - # @example - # use JSON::LD::Rack - # - # @see https://www.w3.org/TR/json-ld11/#iana-considerations - # @see https://www.rubydoc.info/github/rack/rack/master/file/SPEC - class ContentNegotiation - VARY = {'Vary' => 'Accept'}.freeze - - # @return [#call] - attr_reader :app - +module JSON + module LD ## - # * Registers JSON::LD::Rack, suitable for Sinatra application - # * adds helpers + # Rack middleware for JSON-LD content negotiation. # - # @param [Sinatra::Base] app - # @return [void] - def self.registered(app) - options = {} - app.use(JSON::LD::Rack, **options) - end + # Uses HTTP Content Negotiation to serialize `Array` and `Hash` results as JSON-LD using 'profile' accept-params to invoke appropriate JSON-LD API methods. + # + # Allows black-listing and white-listing of two-part profiles where the second part denotes a URL of a _context_ or _frame_. (See {JSON::LD::Writer.accept?}) + # + # Works along with `rack-linkeddata` for serializing data which is not in the form of an `RDF::Repository`. 
+ # + # + # @example + # use JSON::LD::Rack + # + # @see https://www.w3.org/TR/json-ld11/#iana-considerations + # @see https://www.rubydoc.info/github/rack/rack/master/file/SPEC + class ContentNegotiation + VARY = { 'Vary' => 'Accept' }.freeze + + # @return [#call] + attr_reader :app + + ## + # * Registers JSON::LD::Rack, suitable for Sinatra application + # * adds helpers + # + # @param [Sinatra::Base] app + # @return [void] + def self.registered(app) + options = {} + app.use(JSON::LD::Rack, **options) + end - def initialize(app) - @app = app - end + def initialize(app) + @app = app + end - ## - # Handles a Rack protocol request. - # Parses Accept header to find appropriate mime-type and sets content_type accordingly. - # - # @param [Hash{String => String}] env - # @return [Array(Integer, Hash, #each)] Status, Headers and Body - # @see https://rubydoc.info/github/rack/rack/file/SPEC - def call(env) - response = app.call(env) - body = response[2].respond_to?(:body) ? response[2].body : response[2] - case body + ## + # Handles a Rack protocol request. + # Parses Accept header to find appropriate mime-type and sets content_type accordingly. + # + # @param [Hash{String => String}] env + # @return [Array(Integer, Hash, #each)] Status, Headers and Body + # @see https://rubydoc.info/github/rack/rack/file/SPEC + def call(env) + response = app.call(env) + body = response[2].respond_to?(:body) ? response[2].body : response[2] + case body when Array, Hash - response[2] = body # Put it back in the response, it might have been a proxy + response[2] = body # Put it back in the response, it might have been a proxy serialize(env, *response) else response - end - end - - ## - # Serializes objects as JSON-LD. Defaults to expanded form, other forms - # determined by presense of `profile` in accept-parms. 
- # - # @param [Hash{String => String}] env - # @param [Integer] status - # @param [Hash{String => Object}] headers - # @param [RDF::Enumerable] body - # @return [Array(Integer, Hash, #each)] Status, Headers and Body - def serialize(env, status, headers, body) - # This will only return json-ld content types, possibly with parameters - content_types = parse_accept_header(env['HTTP_ACCEPT'] || 'application/ld+json') - content_types = content_types.select do |content_type| - _, *params = content_type.split(';').map(&:strip) - accept_params = params.inject({}) do |memo, pv| - p, v = pv.split('=').map(&:strip) - memo.merge(p.downcase.to_sym => v.sub(/^["']?([^"']*)["']?$/, '\1')) end - JSON::LD::Writer.accept?(accept_params) end - if content_types.empty? - not_acceptable("No appropriate combinaion of media-type and parameters found") - else - ct, *params = content_types.first.split(';').map(&:strip) - accept_params = params.inject({}) do |memo, pv| - p, v = pv.split('=').map(&:strip) - memo.merge(p.downcase.to_sym => v.sub(/^["']?([^"']*)["']?$/, '\1')) - end - - # Determine API method from profile - profile = accept_params[:profile].to_s.split(' ') - - # Get context from Link header - links = LinkHeader.parse(env['HTTP_LINK']) - context = links.find_link(['rel', JSON_LD_NS+"context"]).href rescue nil - frame = links.find_link(['rel', JSON_LD_NS+"frame"]).href rescue nil - if profile.include?(JSON_LD_NS+"framed") && frame.nil? - return not_acceptable("framed profile without a frame") + ## + # Serializes objects as JSON-LD. Defaults to expanded form, other forms + # determined by presense of `profile` in accept-parms. 
+ # + # @param [Hash{String => String}] env + # @param [Integer] status + # @param [Hash{String => Object}] headers + # @param [RDF::Enumerable] body + # @return [Array(Integer, Hash, #each)] Status, Headers and Body + def serialize(env, status, headers, body) + # This will only return json-ld content types, possibly with parameters + content_types = parse_accept_header(env['HTTP_ACCEPT'] || 'application/ld+json') + content_types = content_types.select do |content_type| + _, *params = content_type.split(';').map(&:strip) + accept_params = params.inject({}) do |memo, pv| + p, v = pv.split('=').map(&:strip) + memo.merge(p.downcase.to_sym => v.sub(/^["']?([^"']*)["']?$/, '\1')) + end + JSON::LD::Writer.accept?(accept_params) end - - # accept? already determined that there are appropriate contexts - # If profile also includes a URI which is not a namespace, use it for compaction. - context ||= Writer.default_context if profile.include?(JSON_LD_NS+"compacted") - - result = if profile.include?(JSON_LD_NS+"flattened") - API.flatten(body, context) - elsif profile.include?(JSON_LD_NS+"framed") - API.frame(body, frame) - elsif context - API.compact(body, context) - elsif profile.include?(JSON_LD_NS+"expanded") - API.expand(body) + if content_types.empty? 
+ not_acceptable("No appropriate combinaion of media-type and parameters found") else - body + ct, *params = content_types.first.split(';').map(&:strip) + accept_params = params.inject({}) do |memo, pv| + p, v = pv.split('=').map(&:strip) + memo.merge(p.downcase.to_sym => v.sub(/^["']?([^"']*)["']?$/, '\1')) + end + + # Determine API method from profile + profile = accept_params[:profile].to_s.split + + # Get context from Link header + links = LinkHeader.parse(env['HTTP_LINK']) + context = begin + links.find_link(['rel', JSON_LD_NS + "context"]).href + rescue StandardError + nil + end + frame = begin + links.find_link(['rel', JSON_LD_NS + "frame"]).href + rescue StandardError + nil + end + + if profile.include?(JSON_LD_NS + "framed") && frame.nil? + return not_acceptable("framed profile without a frame") + end + + # accept? already determined that there are appropriate contexts + # If profile also includes a URI which is not a namespace, use it for compaction. + context ||= Writer.default_context if profile.include?(JSON_LD_NS + "compacted") + + result = if profile.include?(JSON_LD_NS + "flattened") + API.flatten(body, context) + elsif profile.include?(JSON_LD_NS + "framed") + API.frame(body, frame) + elsif context + API.compact(body, context) + elsif profile.include?(JSON_LD_NS + "expanded") + API.expand(body) + else + body + end + + headers = headers.merge(VARY).merge('Content-Type' => ct) + [status, headers, [result.to_json]] end - - headers = headers.merge(VARY).merge('Content-Type' => ct) - [status, headers, [result.to_json]] + rescue StandardError + http_error(500, $ERROR_INFO.message) end - rescue - http_error(500, $!.message) - end - protected + protected + + ## + # Parses an HTTP `Accept` header, returning an array of MIME content + # types ordered by the precedence rules defined in HTTP/1.1 §14.1. 
+ # + # @param [String, #to_s] header + # @return [Array] + # @see http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1 + def parse_accept_header(header) + entries = header.to_s.split(',') + entries = entries + .map { |e| accept_entry(e) } + .sort_by(&:last) + .map(&:first) + entries.map { |e| find_content_type_for_media_range(e) }.compact + end - ## - # Parses an HTTP `Accept` header, returning an array of MIME content - # types ordered by the precedence rules defined in HTTP/1.1 §14.1. - # - # @param [String, #to_s] header - # @return [Array] - # @see http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1 - def parse_accept_header(header) - entries = header.to_s.split(',') - entries = entries. - map { |e| accept_entry(e) }. - sort_by(&:last). - map(&:first) - entries.map { |e| find_content_type_for_media_range(e) }.compact - end + # Returns an array of quality, number of '*' in content-type, and number of non-'q' parameters + def accept_entry(entry) + type, *options = entry.split(';').map(&:strip) + quality = 0 # we sort smallest first + options.delete_if { |e| quality = 1 - e[2..].to_f if e.start_with? 'q=' } + [options.unshift(type).join(';'), [quality, type.count('*'), 1 - options.size]] + end - # Returns an array of quality, number of '*' in content-type, and number of non-'q' parameters - def accept_entry(entry) - type, *options = entry.split(';').map(&:strip) - quality = 0 # we sort smallest first - options.delete_if { |e| quality = 1 - e[2..-1].to_f if e.start_with? 'q=' } - [options.unshift(type).join(';'), [quality, type.count('*'), 1 - options.size]] - end + ## + # Returns a content type appropriate for the given `media_range`, + # returns `nil` if `media_range` contains a wildcard subtype + # that is not mapped. 
+ # + # @param [String, #to_s] media_range + # @return [String, nil] + def find_content_type_for_media_range(media_range) + media_range = media_range.sub('*/*', 'application/ld+json') if media_range.to_s.start_with?('*/*') + if media_range.to_s.start_with?('application/*') + media_range = media_range.sub('application/*', + 'application/ld+json') + end + if media_range.to_s.start_with?('application/json') + media_range = media_range.sub('application/json', + 'application/ld+json') + end - ## - # Returns a content type appropriate for the given `media_range`, - # returns `nil` if `media_range` contains a wildcard subtype - # that is not mapped. - # - # @param [String, #to_s] media_range - # @return [String, nil] - def find_content_type_for_media_range(media_range) - media_range = media_range.sub('*/*', 'application/ld+json') if media_range.to_s.start_with?('*/*') - media_range = media_range.sub('application/*', 'application/ld+json') if media_range.to_s.start_with?('application/*') - media_range = media_range.sub('application/json', 'application/ld+json') if media_range.to_s.start_with?('application/json') - - media_range.start_with?('application/ld+json') ? media_range : nil - end + media_range.start_with?('application/ld+json') ? media_range : nil + end - ## - # Outputs an HTTP `406 Not Acceptable` response. - # - # @param [String, #to_s] message - # @return [Array(Integer, Hash, #each)] - def not_acceptable(message = nil) - http_error(406, message, VARY) - end + ## + # Outputs an HTTP `406 Not Acceptable` response. + # + # @param [String, #to_s] message + # @return [Array(Integer, Hash, #each)] + def not_acceptable(message = nil) + http_error(406, message, VARY) + end - ## - # Outputs an HTTP `4xx` or `5xx` response. 
- # - # @param [Integer, #to_i] code - # @param [String, #to_s] message - # @param [Hash{String => String}] headers - # @return [Array(Integer, Hash, #each)] - def http_error(code, message = nil, headers = {}) - message = [code, Rack::Utils::HTTP_STATUS_CODES[code]].join(' ') + - (message.nil? ? "\n" : " (#{message})\n") - [code, {'Content-Type' => "text/plain"}.merge(headers), [message]] + ## + # Outputs an HTTP `4xx` or `5xx` response. + # + # @param [Integer, #to_i] code + # @param [String, #to_s] message + # @param [Hash{String => String}] headers + # @return [Array(Integer, Hash, #each)] + def http_error(code, message = nil, headers = {}) + message = [code, Rack::Utils::HTTP_STATUS_CODES[code]].join(' ') + + (message.nil? ? "\n" : " (#{message})\n") + [code, { 'Content-Type' => "text/plain" }.merge(headers), [message]] + end end end end diff --git a/lib/json/ld/context.rb b/lib/json/ld/context.rb index 868a14b5..db3329b5 100644 --- a/lib/json/ld/context.rb +++ b/lib/json/ld/context.rb @@ -1,2191 +1,2358 @@ -# -*- encoding: utf-8 -*- # frozen_string_literal: true + require 'json' require 'bigdecimal' require 'set' require 'rdf/util/cache' -module JSON::LD - class Context - include Utils - include RDF::Util::Logger - - ## - # Preloaded contexts. - # To avoid runtime context parsing and downloading, contexts may be pre-loaded by implementations. - # @return [Hash{Symbol => Context}] - PRELOADED = {} +module JSON + module LD + class Context + include Utils + include RDF::Util::Logger - # Initial contexts, defined on first access - INITIAL_CONTEXTS = {} + ## + # Preloaded contexts. + # To avoid runtime context parsing and downloading, contexts may be pre-loaded by implementations. + # @return [Hash{Symbol => Context}] + PRELOADED = {} - ## - # Defines the maximum number of interned URI references that can be held - # cached in memory at any one time. 
- CACHE_SIZE = 100 # unlimited by default + # Initial contexts, defined on first access + INITIAL_CONTEXTS = {} - class << self ## - # Add preloaded context. In the block form, the context is lazy evaulated on first use. - # @param [String, RDF::URI] url - # @param [Context] context (nil) - # @yieldreturn [Context] - def add_preloaded(url, context = nil, &block) - PRELOADED[url.to_s.freeze] = context || block + # Defines the maximum number of interned URI references that can be held + # cached in memory at any one time. + CACHE_SIZE = 100 # unlimited by default + + class << self + ## + # Add preloaded context. In the block form, the context is lazy evaluated on first use. + # @param [String, RDF::URI] url + # @param [Context] context (nil) + # @yieldreturn [Context] + def add_preloaded(url, context = nil, &block) + PRELOADED[url.to_s.freeze] = context || block + end + + ## + # Alias a previously loaded context + # @param [String, RDF::URI] a + # @param [String, RDF::URI] url + def alias_preloaded(a, url) + PRELOADED[a.to_s.freeze] = PRELOADED[url.to_s.freeze] + end end - ## - # Alias a previousliy loaded context - # @param [String, RDF::URI] a - # @param [String, RDF::URI] url - def alias_preloaded(a, url) - PRELOADED[a.to_s.freeze] = PRELOADED[url.to_s.freeze] + begin + # Attempt to load this to avoid unnecessary context fetches + require 'json/ld/preloaded' + rescue LoadError + # Silently allow this to fail end - end - begin - # Attempt to load this to avoid unnecessary context fetches - require 'json/ld/preloaded' - rescue LoadError - # Silently allow this to fail - end + # The base. + # + # @return [RDF::URI] Current base IRI, used for expanding relative IRIs. + attr_reader :base + + # @return [RDF::URI] base IRI of the context, if loaded remotely.
+ attr_accessor :context_base + + # Term definitions + # @return [Hash{String => TermDefinition}] + attr_reader :term_definitions + + # @return [Hash{RDF::URI => String}] Reverse mappings from IRI to term only for terms, not CURIEs XXX + attr_accessor :iri_to_term + + # Previous definition for this context. This is used for rolling back type-scoped contexts. + # @return [Context] + attr_accessor :previous_context + + # Context is property-scoped + # @return [Boolean] + attr_accessor :property_scoped + + # Default language + # + # This adds a language to plain strings that aren't otherwise coerced + # @return [String] + attr_reader :default_language - # The base. - # - # @return [RDF::URI] Current base IRI, used for expanding relative IRIs. - attr_reader :base - - # @return [RDF::URI] base IRI of the context, if loaded remotely. - attr_accessor :context_base - - # Term definitions - # @return [Hash{String => TermDefinition}] - attr_reader :term_definitions - - # @return [Hash{RDF::URI => String}] Reverse mappings from IRI to term only for terms, not CURIEs XXX - attr_accessor :iri_to_term - - # Previous definition for this context. This is used for rolling back type-scoped contexts. 
- # @return [Context] - attr_accessor :previous_context - - # Context is property-scoped - # @return [Boolean] - attr_accessor :property_scoped - - # Default language - # - # This adds a language to plain strings that aren't otherwise coerced - # @return [String] - attr_reader :default_language - - # Default direction - # - # This adds a direction to plain strings that aren't otherwise coerced - # @return ["lrt", "rtl"] - attr_reader :default_direction - - # Default vocabulary - # - # Sets the default vocabulary used for expanding terms which - # aren't otherwise absolute IRIs - # @return [RDF::URI] - attr_reader :vocab - - # @return [Hash{Symbol => Object}] Global options used in generating IRIs - attr_accessor :options - - # @return [BlankNodeNamer] - attr_accessor :namer - - ## - # Create a new context by parsing a context. - # - # @see #initialize - # @see #parse - # @param [String, #read, Array, Hash, Context] local_context - # @param [String, #to_s] base (nil) - # The Base IRI to use when expanding the document. This overrides the value of `input` if it is a _IRI_. If not specified and `input` is not an _IRI_, the base IRI defaults to the current document IRI if in a browser context, or the empty string if there is no document context. - # @param [Boolean] override_protected (false) - # Protected terms may be cleared. - # @param [Boolean] propagate (true) - # If false, retains any previously defined term, which can be rolled back when the descending into a new node object changes. - # @raise [JsonLdError] - # on a remote context load error, syntax error, or a reference to a term which is not defined. - # @return [Context] - def self.parse(local_context, - base: nil, - override_protected: false, - propagate: true, - **options) - c = self.new(**options) - if local_context.respond_to?(:empty?) && local_context.empty? 
- c - else - c.parse(local_context, - base: base, - override_protected: override_protected, - propagate: propagate) + # Default direction + # + # This adds a direction to plain strings that aren't otherwise coerced + # @return ["ltr", "rtl"] + attr_reader :default_direction + + # Default vocabulary + # + # Sets the default vocabulary used for expanding terms which + # aren't otherwise absolute IRIs + # @return [RDF::URI] + attr_reader :vocab + + # @return [Hash{Symbol => Object}] Global options used in generating IRIs + attr_accessor :options + + # @return [BlankNodeNamer] + attr_accessor :namer + + ## + # Create a new context by parsing a context. + # + # @see #initialize + # @see #parse + # @param [String, #read, Array, Hash, Context] local_context + # @param [String, #to_s] base (nil) + # The Base IRI to use when expanding the document. This overrides the value of `input` if it is a _IRI_. If not specified and `input` is not an _IRI_, the base IRI defaults to the current document IRI if in a browser context, or the empty string if there is no document context. + # @param [Boolean] override_protected (false) + # Protected terms may be cleared. + # @param [Boolean] propagate (true) + # If false, retains any previously defined term, which can be rolled back when the descending into a new node object changes. + # @raise [JsonLdError] + # on a remote context load error, syntax error, or a reference to a term which is not defined. + # @return [Context] + def self.parse(local_context, + base: nil, + override_protected: false, + propagate: true, + **options) + c = new(**options) + if local_context.respond_to?(:empty?) && local_context.empty? + c + else + c.parse(local_context, + base: base, + override_protected: override_protected, + propagate: propagate) + end end - end - ## - # Class-level cache used for retaining parsed remote contexts.
- # - # @return [RDF::Util::Cache] - # @private - def self.cache - @cache ||= RDF::Util::Cache.new(CACHE_SIZE) - end + ## + # Class-level cache used for retaining parsed remote contexts. + # + # @return [RDF::Util::Cache] + # @private + def self.cache + @cache ||= RDF::Util::Cache.new(CACHE_SIZE) + end - ## - # Class-level cache inverse contexts. - # - # @return [RDF::Util::Cache] - # @private - def self.inverse_cache - @inverse_cache ||= RDF::Util::Cache.new(CACHE_SIZE) - end + ## + # Class-level cache inverse contexts. + # + # @return [RDF::Util::Cache] + # @private + def self.inverse_cache + @inverse_cache ||= RDF::Util::Cache.new(CACHE_SIZE) + end - ## - # @private - # Allow caching of well-known contexts - def self.new(**options) - if (options.keys - [ - :compactArrays, - :documentLoader, - :extractAllScripts, - :ordered, - :processingMode, - :validate - ]).empty? - # allow caching - key = options.hash - INITIAL_CONTEXTS[key] ||= begin + ## + # @private + # Allow caching of well-known contexts + def self.new(**options) + if (options.keys - %i[ + compactArrays + documentLoader + extractAllScripts + ordered + processingMode + validate + ]).empty? 
+ # allow caching + key = options.hash + INITIAL_CONTEXTS[key] ||= begin + context = JSON::LD::Context.allocate + context.send(:initialize, **options) + context.freeze + context.term_definitions.freeze + context + end + else + # Don't try to cache context = JSON::LD::Context.allocate context.send(:initialize, **options) - context.freeze - context.term_definitions.freeze context end - else - # Don't try to cache - context = JSON::LD::Context.allocate - context.send(:initialize, **options) - context end - end - ## - # Create new evaluation context - # @param [Hash] options - # @option options [Hash{Symbol => String}] :prefixes - # See `RDF::Reader#initialize` - # @option options [String, #to_s] :vocab - # Initial value for @vocab - # @option options [String, #to_s] :language - # Initial value for @langauge - # @yield [ec] - # @yieldparam [Context] - # @return [Context] - def initialize(**options) - if options[:processingMode] == 'json-ld-1.0' - @processingMode = 'json-ld-1.0' - end - @term_definitions = {} - @iri_to_term = { - RDF.to_uri.to_s => "rdf", - RDF::XSD.to_uri.to_s => "xsd" - } - @namer = BlankNodeMapper.new("t") - - @options = options - - # Load any defined prefixes - (options[:prefixes] || {}).each_pair do |k, v| - next if k.nil? 
- @iri_to_term[v.to_s] = k - @term_definitions[k.to_s] = TermDefinition.new(k, id: v.to_s, simple: true, prefix: true) - end + ## + # Create new evaluation context + # @param [Hash] options + # @option options [Hash{Symbol => String}] :prefixes + # See `RDF::Reader#initialize` + # @option options [String, #to_s] :vocab + # Initial value for @vocab + # @option options [String, #to_s] :language + # Initial value for @language + # @yield [ec] + # @yieldparam [Context] + # @return [Context] + def initialize(**options) + @processingMode = 'json-ld-1.0' if options[:processingMode] == 'json-ld-1.0' + @term_definitions = {} + @iri_to_term = { + RDF.to_uri.to_s => "rdf", + RDF::XSD.to_uri.to_s => "xsd" + } + @namer = BlankNodeMapper.new("t") + + @options = options + + # Load any defined prefixes + (options[:prefixes] || {}).each_pair do |k, v| + next if k.nil? + + @iri_to_term[v.to_s] = k + @term_definitions[k.to_s] = TermDefinition.new(k, id: v.to_s, simple: true, prefix: true) + end - self.vocab = options[:vocab] if options[:vocab] - self.default_language = options[:language] if options[:language] =~ /^[a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})*$/ - @term_definitions = options[:term_definitions] if options[:term_definitions] + self.vocab = options[:vocab] if options[:vocab] + self.default_language = options[:language] if /^[a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})*$/.match?(options[:language]) + @term_definitions = options[:term_definitions] if options[:term_definitions] - # log_debug("init") {"iri_to_term: #{iri_to_term.inspect}"} + # log_debug("init") {"iri_to_term: #{iri_to_term.inspect}"} - yield(self) if block_given? - end + yield(self) if block_given? + end - # Create an Evaluation Context - # - # When processing a JSON-LD data structure, each processing rule is applied using information provided by the active context. This section describes how to produce an active context.
- # - # The active context contains the active term definitions which specify how properties and values have to be interpreted as well as the current base IRI, the vocabulary mapping and the default language. Each term definition consists of an IRI mapping, a boolean flag reverse property, an optional type mapping or language mapping, and an optional container mapping. A term definition can not only be used to map a term to an IRI, but also to map a term to a keyword, in which case it is referred to as a keyword alias. - # - # When processing, the active context is initialized without any term definitions, vocabulary mapping, or default language. If a local context is encountered during processing, a new active context is created by cloning the existing active context. Then the information from the local context is merged into the new active context. Given that local contexts may contain references to remote contexts, this includes their retrieval. - # - # - # @param [String, #read, Array, Hash, Context] local_context - # @param [String, #to_s] base - # The Base IRI to use when expanding the document. This overrides the value of `input` if it is a _IRI_. If not specified and `input` is not an _IRI_, the base IRI defaults to the current document IRI if in a browser context, or the empty string if there is no document context. - # @param [Boolean] override_protected Protected terms may be cleared. - # @param [Boolean] propagate (true) - # If false, retains any previously defined term, which can be rolled back when the descending into a new node object changes. - # @param [Array] remote_contexts ([]) - # @param [Boolean] validate_scoped (true). - # Validate scoped context, loading if necessary. - # If false, do not load scoped contexts. - # @raise [JsonLdError] - # on a remote context load error, syntax error, or a reference to a term which is not defined. 
- # @return [Context] - # @see https://www.w3.org/TR/json-ld11-api/index.html#context-processing-algorithm - def parse(local_context, - base: nil, - override_protected: false, - propagate: true, - remote_contexts: [], - validate_scoped: true) - result = self.dup - # Early check for @propagate, which can only appear in a local context - propagate = local_context.is_a?(Hash) ? local_context.fetch('@propagate', propagate) : propagate - result.previous_context ||= result.dup unless propagate - - local_context = as_array(local_context) - - log_depth do - local_context.each do |context| - case context - when nil,false - # 3.1 If the `override_protected` is false, and the active context contains protected terms, an error is raised. - if override_protected || result.term_definitions.values.none?(&:protected?) - null_context = Context.new(**options) - null_context.previous_context = result unless propagate - result = null_context - else - raise JSON::LD::JsonLdError::InvalidContextNullification, + # Create an Evaluation Context + # + # When processing a JSON-LD data structure, each processing rule is applied using information provided by the active context. This section describes how to produce an active context. + # + # The active context contains the active term definitions which specify how properties and values have to be interpreted as well as the current base IRI, the vocabulary mapping and the default language. Each term definition consists of an IRI mapping, a boolean flag reverse property, an optional type mapping or language mapping, and an optional container mapping. A term definition can not only be used to map a term to an IRI, but also to map a term to a keyword, in which case it is referred to as a keyword alias. + # + # When processing, the active context is initialized without any term definitions, vocabulary mapping, or default language. 
If a local context is encountered during processing, a new active context is created by cloning the existing active context. Then the information from the local context is merged into the new active context. Given that local contexts may contain references to remote contexts, this includes their retrieval. + # + # + # @param [String, #read, Array, Hash, Context] local_context + # @param [String, #to_s] base + # The Base IRI to use when expanding the document. This overrides the value of `input` if it is a _IRI_. If not specified and `input` is not an _IRI_, the base IRI defaults to the current document IRI if in a browser context, or the empty string if there is no document context. + # @param [Boolean] override_protected Protected terms may be cleared. + # @param [Boolean] propagate (true) + # If false, retains any previously defined term, which can be rolled back when the descending into a new node object changes. + # @param [Array] remote_contexts ([]) + # @param [Boolean] validate_scoped (true). + # Validate scoped context, loading if necessary. + # If false, do not load scoped contexts. + # @raise [JsonLdError] + # on a remote context load error, syntax error, or a reference to a term which is not defined. + # @return [Context] + # @see https://www.w3.org/TR/json-ld11-api/index.html#context-processing-algorithm + def parse(local_context, + base: nil, + override_protected: false, + propagate: true, + remote_contexts: [], + validate_scoped: true) + result = dup + # Early check for @propagate, which can only appear in a local context + propagate = local_context.is_a?(Hash) ? local_context.fetch('@propagate', propagate) : propagate + result.previous_context ||= result.dup unless propagate + + local_context = as_array(local_context) + + log_depth do + local_context.each do |context| + case context + when nil, false + # 3.1 If the `override_protected` is false, and the active context contains protected terms, an error is raised. 
+ if override_protected || result.term_definitions.values.none?(&:protected?) + null_context = Context.new(**options) + null_context.previous_context = result unless propagate + result = null_context + else + raise JSON::LD::JsonLdError::InvalidContextNullification, "Attempt to clear a context with protected terms" - end - when Context - # log_debug("parse") {"context: #{context.inspect}"} - result = result.merge(context) - when IO, StringIO - # log_debug("parse") {"io: #{context}"} - # Load context document, if it is an open file - begin - ctx = load_context(context, **@options) - raise JSON::LD::JsonLdError::InvalidRemoteContext, "Context missing @context key" if @options[:validate] && ctx['@context'].nil? - result = result.parse(ctx["@context"] ? ctx["@context"] : {}) - rescue JSON::ParserError => e - log_info("parse") {"Failed to parse @context from remote document at #{context}: #{e.message}"} - raise JSON::LD::JsonLdError::InvalidRemoteContext, "Failed to parse remote context at #{context}: #{e.message}" if @options[:validate] - self - end - when String, RDF::URI - # log_debug("parse") {"remote: #{context}, base: #{result.context_base || result.base}"} + end + when Context + # log_debug("parse") {"context: #{context.inspect}"} + result = result.merge(context) + when IO, StringIO + # log_debug("parse") {"io: #{context}"} + # Load context document, if it is an open file + begin + ctx = load_context(context, **@options) + if @options[:validate] && ctx['@context'].nil? + raise JSON::LD::JsonLdError::InvalidRemoteContext, + "Context missing @context key" + end - # 3.2.1) Set context to the result of resolving value against the base IRI which is established as specified in section 5.1 Establishing a Base URI of [RFC3986]. Only the basic algorithm in section 5.2 of [RFC3986] is used; neither Syntax-Based Normalization nor Scheme-Based Normalization are performed. 
Characters additionally allowed in IRI references are treated in the same way that unreserved characters are treated in URI references, per section 6.5 of [RFC3987]. - context = RDF::URI(result.context_base || base).join(context) - context_canon = context.canonicalize - context_canon.scheme = 'http' if context_canon.scheme == 'https' + result = result.parse(ctx["@context"] || {}) + rescue JSON::ParserError => e + log_info("parse") { "Failed to parse @context from remote document at #{context}: #{e.message}" } + if @options[:validate] + raise JSON::LD::JsonLdError::InvalidRemoteContext, + "Failed to parse remote context at #{context}: #{e.message}" + end - # If validating a scoped context which has already been loaded, skip to the next one - next if !validate_scoped && remote_contexts.include?(context.to_s) + self + end + when String, RDF::URI + # log_debug("parse") {"remote: #{context}, base: #{result.context_base || result.base}"} - remote_contexts << context.to_s - raise JsonLdError::ContextOverflow, "#{context}" if remote_contexts.length >= MAX_CONTEXTS_LOADED + # 3.2.1) Set context to the result of resolving value against the base IRI which is established as specified in section 5.1 Establishing a Base URI of [RFC3986]. Only the basic algorithm in section 5.2 of [RFC3986] is used; neither Syntax-Based Normalization nor Scheme-Based Normalization are performed. Characters additionally allowed in IRI references are treated in the same way that unreserved characters are treated in URI references, per section 6.5 of [RFC3987]. 
+ context = RDF::URI(result.context_base || base).join(context) + context_canon = context.canonicalize + context_canon.scheme = 'http' if context_canon.scheme == 'https' - cached_context = if PRELOADED[context_canon.to_s] - # If we have a cached context, merge it into the current context (result) and use as the new context - # log_debug("parse") {"=> cached_context: #{context_canon.to_s.inspect}"} + # If validating a scoped context which has already been loaded, skip to the next one + next if !validate_scoped && remote_contexts.include?(context.to_s) - # If this is a Proc, then replace the entry with the result of running the Proc - if PRELOADED[context_canon.to_s].respond_to?(:call) - # log_debug("parse") {"=> (call)"} - PRELOADED[context_canon.to_s] = PRELOADED[context_canon.to_s].call - end - PRELOADED[context_canon.to_s].context_base ||= context_canon.to_s - PRELOADED[context_canon.to_s] - else - # Load context document, if it is a string - Context.cache[context_canon.to_s] ||= begin - context_opts = @options.merge( - profile: 'http://www.w3.org/ns/json-ld#context', - requestProfile: 'http://www.w3.org/ns/json-ld#context', - base: nil) - #context_opts.delete(:headers) - JSON::LD::API.loadRemoteDocument(context.to_s, **context_opts) do |remote_doc| - # 3.2.5) Dereference context. If the dereferenced document has no top-level JSON object with an @context member, an invalid remote context has been detected and processing is aborted; otherwise, set context to the value of that member. 
- raise JsonLdError::InvalidRemoteContext, "#{context}" unless remote_doc.document.is_a?(Hash) && remote_doc.document.key?('@context') - - # Parse stand-alone - ctx = Context.new(unfrozen: true, **options).dup - ctx.context_base = context.to_s - ctx = ctx.parse(remote_doc.document['@context'], remote_contexts: remote_contexts.dup) - ctx.context_base = context.to_s # In case it was altered - ctx.instance_variable_set(:@base, nil) - ctx - end - rescue JsonLdError::LoadingDocumentFailed => e - log_info("parse") {"Failed to retrieve @context from remote document at #{context_canon.inspect}: #{e.message}"} - raise JsonLdError::LoadingRemoteContextFailed, "#{context}: #{e.message}", e.backtrace - rescue JsonLdError - raise - rescue StandardError => e - log_info("parse") {"Failed to retrieve @context from remote document at #{context_canon.inspect}: #{e.message}"} - raise JsonLdError::LoadingRemoteContextFailed, "#{context}: #{e.message}", e.backtrace - end - end + remote_contexts << context.to_s + raise JsonLdError::ContextOverflow, context.to_s if remote_contexts.length >= MAX_CONTEXTS_LOADED - # Merge loaded context noting protected term overriding - context = result.merge(cached_context, override_protected: override_protected) + cached_context = if PRELOADED[context_canon.to_s] + # If we have a cached context, merge it into the current context (result) and use as the new context + # log_debug("parse") {"=> cached_context: #{context_canon.to_s.inspect}"} - context.previous_context = self unless propagate - result = context - when Hash - context = context.dup # keep from modifying a hash passed as a param - - # This counts on hash elements being processed in order - { - '@version' => :processingMode=, - '@import' => nil, - '@base' => :base=, - '@direction' => :default_direction=, - '@language' => :default_language=, - '@propagate' => :propagate=, - '@vocab' => :vocab=, - }.each do |key, setter| - next unless context.key?(key) - if key == '@import' - # Retrieve remote 
context and merge the remaining context object into the result. - raise JsonLdError::InvalidContextEntry, "@import may only be used in 1.1 mode}" if result.processingMode("json-ld-1.0") - raise JsonLdError::InvalidImportValue, "@import must be a string: #{context['@import'].inspect}" unless context['@import'].is_a?(String) - import_loc = RDF::URI(result.context_base || base).join(context['@import']) - begin - context_opts = @options.merge( - profile: 'http://www.w3.org/ns/json-ld#context', - requestProfile: 'http://www.w3.org/ns/json-ld#context', - base: nil) - context_opts.delete(:headers) - # FIXME: should cache this, but ContextCache is for parsed contexts - JSON::LD::API.loadRemoteDocument(import_loc, **context_opts) do |remote_doc| - # Dereference import_loc. If the dereferenced document has no top-level JSON object with an @context member, an invalid remote context has been detected and processing is aborted; otherwise, set context to the value of that member. - raise JsonLdError::InvalidRemoteContext, "#{import_loc}" unless remote_doc.document.is_a?(Hash) && remote_doc.document.key?('@context') - import_context = remote_doc.document['@context'] - import_context.delete('@base') - raise JsonLdError::InvalidRemoteContext, "#{import_context.to_json} must be an object" unless import_context.is_a?(Hash) - raise JsonLdError::InvalidContextEntry, "#{import_context.to_json} must not include @import entry" if import_context.key?('@import') - context.delete(key) - context = import_context.merge(context) + # If this is a Proc, then replace the entry with the result of running the Proc + if PRELOADED[context_canon.to_s].respond_to?(:call) + # log_debug("parse") {"=> (call)"} + PRELOADED[context_canon.to_s] = PRELOADED[context_canon.to_s].call + end + PRELOADED[context_canon.to_s].context_base ||= context_canon.to_s + PRELOADED[context_canon.to_s] + else + # Load context document, if it is a string + Context.cache[context_canon.to_s] ||= begin + context_opts = 
@options.merge( + profile: 'http://www.w3.org/ns/json-ld#context', + requestProfile: 'http://www.w3.org/ns/json-ld#context', + base: nil + ) + # context_opts.delete(:headers) + JSON::LD::API.loadRemoteDocument(context.to_s, **context_opts) do |remote_doc| + # 3.2.5) Dereference context. If the dereferenced document has no top-level JSON object with an @context member, an invalid remote context has been detected and processing is aborted; otherwise, set context to the value of that member. + unless remote_doc.document.is_a?(Hash) && remote_doc.document.key?('@context') + raise JsonLdError::InvalidRemoteContext, + context.to_s + end + + # Parse stand-alone + ctx = Context.new(unfrozen: true, **options).dup + ctx.context_base = context.to_s + ctx = ctx.parse(remote_doc.document['@context'], remote_contexts: remote_contexts.dup) + ctx.context_base = context.to_s # In case it was altered + ctx.instance_variable_set(:@base, nil) + ctx + end + rescue JsonLdError::LoadingDocumentFailed => e + log_info("parse") do + "Failed to retrieve @context from remote document at #{context_canon.inspect}: #{e.message}" + end + raise JsonLdError::LoadingRemoteContextFailed, "#{context}: #{e.message}", e.backtrace + rescue JsonLdError + raise + rescue StandardError => e + log_info("parse") do + "Failed to retrieve @context from remote document at #{context_canon.inspect}: #{e.message}" + end + raise JsonLdError::LoadingRemoteContextFailed, "#{context}: #{e.message}", e.backtrace end - rescue JsonLdError::LoadingDocumentFailed => e - raise JsonLdError::LoadingRemoteContextFailed, "#{import_loc}: #{e.message}", e.backtrace - rescue JsonLdError - raise - rescue StandardError => e - raise JsonLdError::LoadingRemoteContextFailed, "#{import_loc}: #{e.message}", e.backtrace end - else - result.send(setter, context[key], remote_contexts: remote_contexts) - end - context.delete(key) - end - defined = {} + # Merge loaded context noting protected term overriding + context = 
result.merge(cached_context, override_protected: override_protected) + + context.previous_context = self unless propagate + result = context + when Hash + context = context.dup # keep from modifying a hash passed as a param + + # This counts on hash elements being processed in order + { + '@version' => :processingMode=, + '@import' => nil, + '@base' => :base=, + '@direction' => :default_direction=, + '@language' => :default_language=, + '@propagate' => :propagate=, + '@vocab' => :vocab= + }.each do |key, setter| + next unless context.key?(key) + + if key == '@import' + # Retrieve remote context and merge the remaining context object into the result. + if result.processingMode("json-ld-1.0") + raise JsonLdError::InvalidContextEntry, + "@import may only be used in 1.1 mode}" + end + unless context['@import'].is_a?(String) + raise JsonLdError::InvalidImportValue, + "@import must be a string: #{context['@import'].inspect}" + end + + import_loc = RDF::URI(result.context_base || base).join(context['@import']) + begin + context_opts = @options.merge( + profile: 'http://www.w3.org/ns/json-ld#context', + requestProfile: 'http://www.w3.org/ns/json-ld#context', + base: nil + ) + context_opts.delete(:headers) + # FIXME: should cache this, but ContextCache is for parsed contexts + JSON::LD::API.loadRemoteDocument(import_loc, **context_opts) do |remote_doc| + # Dereference import_loc. If the dereferenced document has no top-level JSON object with an @context member, an invalid remote context has been detected and processing is aborted; otherwise, set context to the value of that member. 
+ unless remote_doc.document.is_a?(Hash) && remote_doc.document.key?('@context') + raise JsonLdError::InvalidRemoteContext, + import_loc.to_s + end + + import_context = remote_doc.document['@context'] + import_context.delete('@base') + unless import_context.is_a?(Hash) + raise JsonLdError::InvalidRemoteContext, + "#{import_context.to_json} must be an object" + end + if import_context.key?('@import') + raise JsonLdError::InvalidContextEntry, + "#{import_context.to_json} must not include @import entry" + end + + context.delete(key) + context = import_context.merge(context) + end + rescue JsonLdError::LoadingDocumentFailed => e + raise JsonLdError::LoadingRemoteContextFailed, "#{import_loc}: #{e.message}", e.backtrace + rescue JsonLdError + raise + rescue StandardError => e + raise JsonLdError::LoadingRemoteContextFailed, "#{import_loc}: #{e.message}", e.backtrace + end + else + result.send(setter, context[key], remote_contexts: remote_contexts) + end + context.delete(key) + end + + defined = {} - # For each key-value pair in context invoke the Create Term Definition subalgorithm, passing result for active context, context for local context, key, and defined - context.each_key do |key| - # ... where key is not @base, @vocab, @language, or @version - result.create_term_definition(context, key, defined, - base: base, - override_protected: override_protected, - protected: context['@protected'], - remote_contexts: remote_contexts.dup, - validate_scoped: validate_scoped - ) unless NON_TERMDEF_KEYS.include?(key) + # For each key-value pair in context invoke the Create Term Definition subalgorithm, passing result for active context, context for local context, key, and defined + context.each_key do |key| + # ... 
where key is not @base, @vocab, @language, or @version + next if NON_TERMDEF_KEYS.include?(key) + + result.create_term_definition(context, key, defined, + base: base, + override_protected: override_protected, + protected: context['@protected'], + remote_contexts: remote_contexts.dup, + validate_scoped: validate_scoped) + end + else + # 3.3) If context is not a JSON object, an invalid local context error has been detected and processing is aborted. + raise JsonLdError::InvalidLocalContext, "must be a URL, JSON object or array of same: #{context.inspect}" + end end - else - # 3.3) If context is not a JSON object, an invalid local context error has been detected and processing is aborted. - raise JsonLdError::InvalidLocalContext, "must be a URL, JSON object or array of same: #{context.inspect}" end + result end - end - result - end - ## - # Merge in a context, creating a new context with updates from `context` - # - # @param [Context] context - # @param [Boolean] override_protected Allow or disallow protected terms to be changed - # @return [Context] - def merge(context, override_protected: false) - ctx = Context.new(term_definitions: self.term_definitions, standard_prefixes: options[:standard_prefixes]) - ctx.context_base = context.context_base || self.context_base - ctx.default_language = context.default_language || self.default_language - ctx.default_direction = context.default_direction || self.default_direction - ctx.vocab = context.vocab || self.vocab - ctx.base = self.base unless self.base.nil? - if !override_protected - ctx.term_definitions.each do |term, definition| - next unless definition.protected? 
&& (other = context.term_definitions[term]) - unless definition == other - raise JSON::LD::JsonLdError::ProtectedTermRedefinition, "Attempt to redefine protected term #{term}" + ## + # Merge in a context, creating a new context with updates from `context` + # + # @param [Context] context + # @param [Boolean] override_protected Allow or disallow protected terms to be changed + # @return [Context] + def merge(context, override_protected: false) + ctx = Context.new(term_definitions: term_definitions, standard_prefixes: options[:standard_prefixes]) + ctx.context_base = context.context_base || context_base + ctx.default_language = context.default_language || default_language + ctx.default_direction = context.default_direction || default_direction + ctx.vocab = context.vocab || vocab + ctx.base = base unless base.nil? + unless override_protected + ctx.term_definitions.each do |term, definition| + next unless definition.protected? && (other = context.term_definitions[term]) + unless definition == other + raise JSON::LD::JsonLdError::ProtectedTermRedefinition, "Attempt to redefine protected term #{term}" + end end end - end - # Add term definitions - context.term_definitions.each do |term, definition| - ctx.term_definitions[term] = definition + # Add term definitions + context.term_definitions.each do |term, definition| + ctx.term_definitions[term] = definition + end + ctx end - ctx - end - # The following constants are used to reduce object allocations in #create_term_definition below - ID_NULL_OBJECT = { '@id' => nil }.freeze - NON_TERMDEF_KEYS = Set.new(%w(@base @direction @language @protected @version @vocab)).freeze - JSON_LD_10_EXPECTED_KEYS = Set.new(%w(@container @id @language @reverse @type)).freeze - JSON_LD_11_EXPECTED_KEYS = Set.new(%w(@context @direction @index @nest @prefix @protected)).freeze - JSON_LD_EXPECTED_KEYS = (JSON_LD_10_EXPECTED_KEYS + JSON_LD_11_EXPECTED_KEYS).freeze - JSON_LD_10_TYPE_VALUES = Set.new(%w(@id @vocab)).freeze - 
JSON_LD_11_TYPE_VALUES = Set.new(%w(@json @none)).freeze - PREFIX_URI_ENDINGS = Set.new(%w(: / ? # [ ] @)).freeze - - ## - # Create Term Definition - # - # Term definitions are created by parsing the information in the given local context for the given term. If the given term is a compact IRI, it may omit an IRI mapping by depending on its prefix having its own term definition. If the prefix is a key in the local context, then its term definition must first be created, through recursion, before continuing. Because a term definition can depend on other term definitions, a mechanism must be used to detect cyclical dependencies. The solution employed here uses a map, defined, that keeps track of whether or not a term has been defined or is currently in the process of being defined. This map is checked before any recursion is attempted. - # - # After all dependencies for a term have been defined, the rest of the information in the local context for the given term is taken into account, creating the appropriate IRI mapping, container mapping, and type mapping or language mapping for the term. - # - # @param [Hash] local_context - # @param [String] term - # @param [Hash] defined - # @param [String, RDF::URI] base for resolving document-relative IRIs - # @param [Boolean] protected if true, causes all terms to be marked protected - # @param [Boolean] override_protected Protected terms may be cleared. - # @param [Array] remote_contexts - # @param [Boolean] validate_scoped (true). - # Validate scoped context, loading if necessary. - # If false, do not load scoped contexts. 
- # @raise [JsonLdError] - # Represents a cyclical term dependency - # @see https://www.w3.org/TR/json-ld11-api/index.html#create-term-definition - def create_term_definition(local_context, term, defined, - base: nil, - override_protected: false, - protected: nil, - remote_contexts: [], - validate_scoped: true) - # Expand a string value, unless it matches a keyword - # log_debug("create_term_definition") {"term = #{term.inspect}"} - - # If defined contains the key term, then the associated value must be true, indicating that the term definition has already been created, so return. Otherwise, a cyclical term definition has been detected, which is an error. - case defined[term] - when TrueClass then return - when nil - defined[term] = false - else - raise JsonLdError::CyclicIRIMapping, "Cyclical term dependency found: #{term.inspect}" - end + # The following constants are used to reduce object allocations in #create_term_definition below + ID_NULL_OBJECT = { '@id' => nil }.freeze + NON_TERMDEF_KEYS = Set.new(%w[@base @direction @language @protected @version @vocab]).freeze + JSON_LD_10_EXPECTED_KEYS = Set.new(%w[@container @id @language @reverse @type]).freeze + JSON_LD_11_EXPECTED_KEYS = Set.new(%w[@context @direction @index @nest @prefix @protected]).freeze + JSON_LD_EXPECTED_KEYS = (JSON_LD_10_EXPECTED_KEYS + JSON_LD_11_EXPECTED_KEYS).freeze + JSON_LD_10_TYPE_VALUES = Set.new(%w[@id @vocab]).freeze + JSON_LD_11_TYPE_VALUES = Set.new(%w[@json @none]).freeze + PREFIX_URI_ENDINGS = Set.new(%w(: / ? # [ ] @)).freeze - # Initialize value to a the value associated with the key term in local context. - value = local_context.fetch(term, false) - simple_term = value.is_a?(String) || value.nil? - - # Since keywords cannot be overridden, term must not be a keyword. Otherwise, an invalid value has been detected, which is an error. - if term == '@type' && - value.is_a?(Hash) && - !value.empty? 
&& - processingMode("json-ld-1.1") && - (value.keys - %w(@container @protected)).empty? && - value.fetch('@container', '@set') == '@set' - # thes are the only cases were redefining a keyword is allowed - elsif KEYWORDS.include?(term) # TODO anything that looks like a keyword - raise JsonLdError::KeywordRedefinition, "term must not be a keyword: #{term.inspect}" if - @options[:validate] - elsif term.to_s.match?(/^@[a-zA-Z]+$/) && @options[:validate] - warn "Terms beginning with '@' are reserved for future use and ignored: #{term}." - return - elsif !term_valid?(term) && @options[:validate] - raise JsonLdError::InvalidTermDefinition, "term is invalid: #{term.inspect}" - end + ## + # Create Term Definition + # + # Term definitions are created by parsing the information in the given local context for the given term. If the given term is a compact IRI, it may omit an IRI mapping by depending on its prefix having its own term definition. If the prefix is a key in the local context, then its term definition must first be created, through recursion, before continuing. Because a term definition can depend on other term definitions, a mechanism must be used to detect cyclical dependencies. The solution employed here uses a map, defined, that keeps track of whether or not a term has been defined or is currently in the process of being defined. This map is checked before any recursion is attempted. + # + # After all dependencies for a term have been defined, the rest of the information in the local context for the given term is taken into account, creating the appropriate IRI mapping, container mapping, and type mapping or language mapping for the term. + # + # @param [Hash] local_context + # @param [String] term + # @param [Hash] defined + # @param [String, RDF::URI] base for resolving document-relative IRIs + # @param [Boolean] protected if true, causes all terms to be marked protected + # @param [Boolean] override_protected Protected terms may be cleared. 
+ # @param [Array] remote_contexts + # @param [Boolean] validate_scoped (true). + # Validate scoped context, loading if necessary. + # If false, do not load scoped contexts. + # @raise [JsonLdError] + # Represents a cyclical term dependency + # @see https://www.w3.org/TR/json-ld11-api/index.html#create-term-definition + def create_term_definition(local_context, term, defined, + base: nil, + override_protected: false, + protected: nil, + remote_contexts: [], + validate_scoped: true) + # Expand a string value, unless it matches a keyword + # log_debug("create_term_definition") {"term = #{term.inspect}"} + + # If defined contains the key term, then the associated value must be true, indicating that the term definition has already been created, so return. Otherwise, a cyclical term definition has been detected, which is an error. + case defined[term] + when TrueClass then return + when nil + defined[term] = false + else + raise JsonLdError::CyclicIRIMapping, "Cyclical term dependency found: #{term.inspect}" + end - value = {'@id' => value} if simple_term + # Initialize value to a the value associated with the key term in local context. + value = local_context.fetch(term, false) + simple_term = value.is_a?(String) || value.nil? + + # Since keywords cannot be overridden, term must not be a keyword. Otherwise, an invalid value has been detected, which is an error. + if term == '@type' && + value.is_a?(Hash) && + !value.empty? && + processingMode("json-ld-1.1") && + (value.keys - %w[@container @protected]).empty? && + value.fetch('@container', '@set') == '@set' + # thes are the only cases were redefining a keyword is allowed + elsif KEYWORDS.include?(term) # TODO: anything that looks like a keyword + raise JsonLdError::KeywordRedefinition, "term must not be a keyword: #{term.inspect}" if + @options[:validate] + elsif term.to_s.match?(/^@[a-zA-Z]+$/) && @options[:validate] + warn "Terms beginning with '@' are reserved for future use and ignored: #{term}." 
+ return + elsif !term_valid?(term) && @options[:validate] + raise JsonLdError::InvalidTermDefinition, "term is invalid: #{term.inspect}" + end - # Remove any existing term definition for term in active context. - previous_definition = term_definitions[term] - if previous_definition && previous_definition.protected? && !override_protected - # Check later to detect identical redefinition - else - term_definitions.delete(term) if previous_definition - end + value = { '@id' => value } if simple_term - raise JsonLdError::InvalidTermDefinition, "Term definition for #{term.inspect} is an #{value.class} on term #{term.inspect}" unless value.is_a?(Hash) + # Remove any existing term definition for term in active context. + previous_definition = term_definitions[term] + if previous_definition&.protected? && !override_protected + # Check later to detect identical redefinition + elsif previous_definition + term_definitions.delete(term) + end - # log_debug("") {"Hash[#{term.inspect}] = #{value.inspect}"} - definition = TermDefinition.new(term) - definition.simple = simple_term + unless value.is_a?(Hash) + raise JsonLdError::InvalidTermDefinition, + "Term definition for #{term.inspect} is an #{value.class} on term #{term.inspect}" + end - expected_keys = case processingMode - when "json-ld-1.0" then JSON_LD_10_EXPECTED_KEYS - else JSON_LD_EXPECTED_KEYS - end + # log_debug("") {"Hash[#{term.inspect}] = #{value.inspect}"} + definition = TermDefinition.new(term) + definition.simple = simple_term - # Any of these keys cause us to process as json-ld-1.1, unless otherwise set - if processingMode.nil? && value.any? { |key, _| !JSON_LD_11_EXPECTED_KEYS.include?(key) } - processingMode('json-ld-11') - end + expected_keys = case processingMode + when "json-ld-1.0" then JSON_LD_10_EXPECTED_KEYS + else JSON_LD_EXPECTED_KEYS + end - if value.any? 
{ |key, _| !expected_keys.include?(key) } - extra_keys = value.keys - expected_keys.to_a - raise JsonLdError::InvalidTermDefinition, "Term definition for #{term.inspect} has unexpected keys: #{extra_keys.join(', ')}" - end + # Any of these keys cause us to process as json-ld-1.1, unless otherwise set + if processingMode.nil? && value.any? { |key, _| !JSON_LD_11_EXPECTED_KEYS.include?(key) } + processingMode('json-ld-11') + end - # Potentially note that the term is protected - definition.protected = value.fetch('@protected', protected) + if value.any? { |key, _| !expected_keys.include?(key) } + extra_keys = value.keys - expected_keys.to_a + raise JsonLdError::InvalidTermDefinition, + "Term definition for #{term.inspect} has unexpected keys: #{extra_keys.join(', ')}" + end - if value.key?('@type') - type = value['@type'] - # SPEC FIXME: @type may be nil - type = case type - when nil - type - when String - begin - expand_iri(type, vocab: true, documentRelative: false, local_context: local_context, defined: defined) - rescue JsonLdError::InvalidIRIMapping - raise JsonLdError::InvalidTypeMapping, "invalid mapping for '@type': #{type.inspect} on term #{term.inspect}" + # Potentially note that the term is protected + definition.protected = value.fetch('@protected', protected) + + if value.key?('@type') + type = value['@type'] + # SPEC FIXME: @type may be nil + type = case type + when nil + type + when String + begin + expand_iri(type, vocab: true, documentRelative: false, local_context: local_context, defined: defined) + rescue JsonLdError::InvalidIRIMapping + raise JsonLdError::InvalidTypeMapping, + "invalid mapping for '@type': #{type.inspect} on term #{term.inspect}" + end + else + :error end - else - :error - end - if JSON_LD_11_TYPE_VALUES.include?(type) && processingMode('json-ld-1.1') - # This is okay and used in compaction in 1.1 - elsif !JSON_LD_10_TYPE_VALUES.include?(type) && !(type.is_a?(RDF::URI) && type.absolute?) 
- raise JsonLdError::InvalidTypeMapping, "unknown mapping for '@type': #{type.inspect} on term #{term.inspect}" + if JSON_LD_11_TYPE_VALUES.include?(type) && processingMode('json-ld-1.1') + # This is okay and used in compaction in 1.1 + elsif !JSON_LD_10_TYPE_VALUES.include?(type) && !(type.is_a?(RDF::URI) && type.absolute?) + raise JsonLdError::InvalidTypeMapping, + "unknown mapping for '@type': #{type.inspect} on term #{term.inspect}" + end + # log_debug("") {"type_mapping: #{type.inspect}"} + definition.type_mapping = type end - # log_debug("") {"type_mapping: #{type.inspect}"} - definition.type_mapping = type - end - if value.key?('@reverse') - raise JsonLdError::InvalidReverseProperty, "unexpected key in #{value.inspect} on term #{term.inspect}" if - value.key?('@id') || value.key?('@nest') - raise JsonLdError::InvalidIRIMapping, "expected value of @reverse to be a string: #{value['@reverse'].inspect} on term #{term.inspect}" unless - value['@reverse'].is_a?(String) + if value.key?('@reverse') + raise JsonLdError::InvalidReverseProperty, "unexpected key in #{value.inspect} on term #{term.inspect}" if + value.key?('@id') || value.key?('@nest') - if value['@reverse'].to_s.match?(/^@[a-zA-Z]+$/) && @options[:validate] - warn "Values beginning with '@' are reserved for future use and ignored: #{value['@reverse']}." - return - end + unless value['@reverse'].is_a?(String) + raise JsonLdError::InvalidIRIMapping, + "expected value of @reverse to be a string: #{value['@reverse'].inspect} on term #{term.inspect}" + end - # Otherwise, set the IRI mapping of definition to the result of using the IRI Expansion algorithm, passing active context, the value associated with the @reverse key for value, true for vocab, true for document relative, local context, and defined. If the result is not an absolute IRI, i.e., it contains no colon (:), an invalid IRI mapping error has been detected and processing is aborted. 
- definition.id = expand_iri(value['@reverse'], - vocab: true, - local_context: local_context, - defined: defined) - raise JsonLdError::InvalidIRIMapping, "non-absolute @reverse IRI: #{definition.id} on term #{term.inspect}" unless - definition.id.is_a?(RDF::Node) || definition.id.is_a?(RDF::URI) && definition.id.absolute? + if value['@reverse'].to_s.match?(/^@[a-zA-Z]+$/) && @options[:validate] + warn "Values beginning with '@' are reserved for future use and ignored: #{value['@reverse']}." + return + end - if term[1..-1].to_s.include?(':') && (term_iri = expand_iri(term)) != definition.id - raise JsonLdError::InvalidIRIMapping, "term #{term} expands to #{definition.id}, not #{term_iri}" + # Otherwise, set the IRI mapping of definition to the result of using the IRI Expansion algorithm, passing active context, the value associated with the @reverse key for value, true for vocab, true for document relative, local context, and defined. If the result is not an absolute IRI, i.e., it contains no colon (:), an invalid IRI mapping error has been detected and processing is aborted. + definition.id = expand_iri(value['@reverse'], + vocab: true, + local_context: local_context, + defined: defined) + unless definition.id.is_a?(RDF::Node) || (definition.id.is_a?(RDF::URI) && definition.id.absolute?) + raise JsonLdError::InvalidIRIMapping, + "non-absolute @reverse IRI: #{definition.id} on term #{term.inspect}" + end + + if term[1..].to_s.include?(':') && (term_iri = expand_iri(term)) != definition.id + raise JsonLdError::InvalidIRIMapping, "term #{term} expands to #{definition.id}, not #{term_iri}" + end + + if @options[:validate] && processingMode('json-ld-1.1') && definition.id.to_s.start_with?("_:") + warn "[DEPRECATION] Blank Node terms deprecated in JSON-LD 1.1." 
+ end + + # If value contains an @container member, set the container mapping of definition to its value; if its value is neither @set, @index, @type, @id, an absolute IRI nor null, an invalid reverse property error has been detected (reverse properties only support set- and index-containers) and processing is aborted. + if value.key?('@container') + container = value['@container'] + unless container.is_a?(String) && ['@set', '@index'].include?(container) + raise JsonLdError::InvalidReverseProperty, + "unknown mapping for '@container' to #{container.inspect} on term #{term.inspect}" + end + definition.container_mapping = check_container(container, local_context, defined, term) + end + definition.reverse_property = true + elsif value.key?('@id') && value['@id'].nil? + # Allowed to reserve a null term, which may be protected + elsif value.key?('@id') && value['@id'] != term + unless value['@id'].is_a?(String) + raise JsonLdError::InvalidIRIMapping, + "expected value of @id to be a string: #{value['@id'].inspect} on term #{term.inspect}" + end + + if !KEYWORDS.include?(value['@id'].to_s) && value['@id'].to_s.match?(/^@[a-zA-Z]+$/) && @options[:validate] + warn "Values beginning with '@' are reserved for future use and ignored: #{value['@id']}." + return + end + + definition.id = expand_iri(value['@id'], + vocab: true, + local_context: local_context, + defined: defined) + raise JsonLdError::InvalidKeywordAlias, "expected value of @id to not be @context on term #{term.inspect}" if + definition.id == '@context' + + if term.match?(%r{(?::[^:])|/}) + term_iri = expand_iri(term, + vocab: true, + local_context: local_context, + defined: defined.merge(term => true)) + if term_iri != definition.id + raise JsonLdError::InvalidIRIMapping, "term #{term} expands to #{definition.id}, not #{term_iri}" + end + end + + if @options[:validate] && processingMode('json-ld-1.1') && definition.id.to_s.start_with?("_:") + warn "[DEPRECATION] Blank Node terms deprecated in JSON-LD 1.1." 
+ end + + # If id ends with a gen-delim, it may be used as a prefix for simple terms + definition.prefix = true if !term.include?(':') && + simple_term && + (definition.id.to_s.end_with?(':', '/', '?', '#', '[', ']', + '@') || definition.id.to_s.start_with?('_:')) + elsif term[1..].include?(':') + # If term is a compact IRI with a prefix that is a key in local context then a dependency has been found. Use this algorithm recursively passing active context, local context, the prefix as term, and defined. + prefix, suffix = term.split(':', 2) + create_term_definition(local_context, prefix, defined, protected: protected) if local_context.key?(prefix) + + definition.id = if (td = term_definitions[prefix]) + # If term's prefix has a term definition in active context, set the IRI mapping for definition to the result of concatenating the value associated with the prefix's IRI mapping and the term's suffix. + td.id + suffix + else + # Otherwise, term is an absolute IRI. Set the IRI mapping for definition to term + term + end + # log_debug("") {"=> #{definition.id}"} + elsif term.include?('/') + # If term is a relative IRI + definition.id = expand_iri(term, vocab: true) + raise JsonLdError::InvalidKeywordAlias, "expected term to expand to an absolute IRI #{term.inspect}" unless + definition.id.absolute? + elsif KEYWORDS.include?(term) + # This should only happen for @type when @container is @set + definition.id = term + else + # Otherwise, active context must have a vocabulary mapping, otherwise an invalid value has been detected, which is an error. Set the IRI mapping for definition to the result of concatenating the value associated with the vocabulary mapping and term. + unless vocab + raise JsonLdError::InvalidIRIMapping, + "relative term definition without vocab: #{term} on term #{term.inspect}" + end + + definition.id = vocab + term + # log_debug("") {"=> #{definition.id}"} end - warn "[DEPRECATION] Blank Node terms deprecated in JSON-LD 1.1." 
if @options[:validate] && processingMode('json-ld-1.1') && definition.id.to_s.start_with?("_:") + @iri_to_term[definition.id] = term if simple_term && definition.id - # If value contains an @container member, set the container mapping of definition to its value; if its value is neither @set, @index, @type, @id, an absolute IRI nor null, an invalid reverse property error has been detected (reverse properties only support set- and index-containers) and processing is aborted. if value.key?('@container') - container = value['@container'] - raise JsonLdError::InvalidReverseProperty, - "unknown mapping for '@container' to #{container.inspect} on term #{term.inspect}" unless - container.is_a?(String) && (container == '@set' || container == '@index') - definition.container_mapping = check_container(container, local_context, defined, term) - end - definition.reverse_property = true - elsif value.key?('@id') && value['@id'].nil? - # Allowed to reserve a null term, which may be protected - elsif value.key?('@id') && value['@id'] != term - raise JsonLdError::InvalidIRIMapping, "expected value of @id to be a string: #{value['@id'].inspect} on term #{term.inspect}" unless - value['@id'].is_a?(String) - - if !KEYWORDS.include?(value['@id'].to_s) && value['@id'].to_s.match?(/^@[a-zA-Z]+$/) && @options[:validate] - warn "Values beginning with '@' are reserved for future use and ignored: #{value['@id']}." 
- return + # log_debug("") {"container_mapping: #{value['@container'].inspect}"} + definition.container_mapping = check_container(value['@container'], local_context, defined, term) + + # If @container includes @type + if definition.container_mapping.include?('@type') + # If definition does not have @type, set @type to @id + definition.type_mapping ||= '@id' + # If definition includes @type with a value other than @id or @vocab, an illegal type mapping error has been detected + unless CONTEXT_TYPE_ID_VOCAB.include?(definition.type_mapping) + raise JsonLdError::InvalidTypeMapping, "@container: @type requires @type to be @id or @vocab" + end + end end - definition.id = expand_iri(value['@id'], - vocab: true, - local_context: local_context, - defined: defined) - raise JsonLdError::InvalidKeywordAlias, "expected value of @id to not be @context on term #{term.inspect}" if - definition.id == '@context' + if value.key?('@index') + # property-based indexing + unless definition.container_mapping.include?('@index') + raise JsonLdError::InvalidTermDefinition, + "@index without @index in @container: #{value['@index']} on term #{term.inspect}" + end + unless value['@index'].is_a?(String) && !value['@index'].start_with?('@') + raise JsonLdError::InvalidTermDefinition, + "@index must expand to an IRI: #{value['@index']} on term #{term.inspect}" + end - if term.match?(/(?::[^:])|\//) - term_iri = expand_iri(term, - vocab: true, - local_context: local_context, - defined: defined.merge(term => true)) - if term_iri != definition.id - raise JsonLdError::InvalidIRIMapping, "term #{term} expands to #{definition.id}, not #{term_iri}" + definition.index = value['@index'].to_s + end + + if value.key?('@context') + begin + new_ctx = parse(value['@context'], + base: base, + override_protected: true, + remote_contexts: remote_contexts, + validate_scoped: false) + # Record null context in array form + definition.context = case value['@context'] + when String then new_ctx.context_base + when nil 
then [nil] + else value['@context'] + end + # log_debug("") {"context: #{definition.context.inspect}"} + rescue JsonLdError => e + raise JsonLdError::InvalidScopedContext, + "Term definition for #{term.inspect} contains illegal value for @context: #{e.message}" end end - warn "[DEPRECATION] Blank Node terms deprecated in JSON-LD 1.1." if @options[:validate] && processingMode('json-ld-1.1') && definition.id.to_s.start_with?("_:") + if value.key?('@language') + language = value['@language'] + language = case value['@language'] + when String + # Warn on an invalid language tag, unless :validate is true, in which case it's an error + unless /^[a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})*$/.match?(value['@language']) + warn "@language must be valid BCP47: #{value['@language'].inspect}" + end + options[:lowercaseLanguage] ? value['@language'].downcase : value['@language'] + when nil + nil + else + raise JsonLdError::InvalidLanguageMapping, + "language must be null or a string, was #{value['@language'].inspect}} on term #{term.inspect}" + end + # log_debug("") {"language_mapping: #{language.inspect}"} + definition.language_mapping = language || false + end - # If id ends with a gen-delim, it may be used as a prefix for simple terms - definition.prefix = true if !term.include?(':') && - simple_term && - (definition.id.to_s.end_with?(':', '/', '?', '#', '[', ']', '@') || definition.id.to_s.start_with?('_:')) - elsif term[1..-1].include?(':') - # If term is a compact IRI with a prefix that is a key in local context then a dependency has been found. Use this algorithm recursively passing active context, local context, the prefix as term, and defined. - prefix, suffix = term.split(':', 2) - create_term_definition(local_context, prefix, defined, protected: protected) if local_context.key?(prefix) + if value.key?('@direction') + direction = value['@direction'] + unless direction.nil? 
|| %w[ + ltr rtl + ].include?(direction) + raise JsonLdError::InvalidBaseDirection, + "direction must be null, 'ltr', or 'rtl', was #{language.inspect}} on term #{term.inspect}" + end - definition.id = if td = term_definitions[prefix] - # If term's prefix has a term definition in active context, set the IRI mapping for definition to the result of concatenating the value associated with the prefix's IRI mapping and the term's suffix. - td.id + suffix - else - # Otherwise, term is an absolute IRI. Set the IRI mapping for definition to term - term - end - # log_debug("") {"=> #{definition.id}"} - elsif term.include?('/') - # If term is a relative IRI - definition.id = expand_iri(term, vocab: true) - raise JsonLdError::InvalidKeywordAlias, "expected term to expand to an absolute IRI #{term.inspect}" unless - definition.id.absolute? - elsif KEYWORDS.include?(term) - # This should only happen for @type when @container is @set - definition.id = term - else - # Otherwise, active context must have a vocabulary mapping, otherwise an invalid value has been detected, which is an error. Set the IRI mapping for definition to the result of concatenating the value associated with the vocabulary mapping and term. 
- raise JsonLdError::InvalidIRIMapping, "relative term definition without vocab: #{term} on term #{term.inspect}" unless vocab - definition.id = vocab + term - # log_debug("") {"=> #{definition.id}"} - end + # log_debug("") {"direction_mapping: #{direction.inspect}"} + definition.direction_mapping = direction || false + end - @iri_to_term[definition.id] = term if simple_term && definition.id + if value.key?('@nest') + nest = value['@nest'] + unless nest.is_a?(String) + raise JsonLdError::InvalidNestValue, + "nest must be a string, was #{nest.inspect}} on term #{term.inspect}" + end + if nest.match?(/^@[a-zA-Z]+$/) && nest != '@nest' + raise JsonLdError::InvalidNestValue, + "nest must not be a keyword other than @nest, was #{nest.inspect}} on term #{term.inspect}" + end + + # log_debug("") {"nest: #{nest.inspect}"} + definition.nest = nest + end + + if value.key?('@prefix') + if term.match?(%r{:|/}) + raise JsonLdError::InvalidTermDefinition, + "@prefix used on compact or relative IRI term #{term.inspect}" + end - if value.key?('@container') - # log_debug("") {"container_mapping: #{value['@container'].inspect}"} - definition.container_mapping = check_container(value['@container'], local_context, defined, term) + case pfx = value['@prefix'] + when TrueClass, FalseClass + definition.prefix = pfx + else + raise JsonLdError::InvalidPrefixValue, "unknown value for '@prefix': #{pfx.inspect} on term #{term.inspect}" + end - # If @container includes @type - if definition.container_mapping.include?('@type') - # If definition does not have @type, set @type to @id - definition.type_mapping ||= '@id' - # If definition includes @type with a value other than @id or @vocab, an illegal type mapping error has been detected - if !CONTEXT_TYPE_ID_VOCAB.include?(definition.type_mapping) - raise JsonLdError::InvalidTypeMapping, "@container: @type requires @type to be @id or @vocab" + if pfx && KEYWORDS.include?(definition.id.to_s) + raise JsonLdError::InvalidTermDefinition, + "keywords 
may not be used as prefixes" end end + + if previous_definition&.protected? && definition != previous_definition && !override_protected + definition = previous_definition + raise JSON::LD::JsonLdError::ProtectedTermRedefinition, "Attempt to redefine protected term #{term}" + end + + term_definitions[term] = definition + defined[term] = true end - if value.key?('@index') - # property-based indexing - raise JsonLdError::InvalidTermDefinition, "@index without @index in @container: #{value['@index']} on term #{term.inspect}" unless definition.container_mapping.include?('@index') - raise JsonLdError::InvalidTermDefinition, "@index must expand to an IRI: #{value['@index']} on term #{term.inspect}" unless value['@index'].is_a?(String) && !value['@index'].start_with?('@') - definition.index = value['@index'].to_s + ## + # Initial context, without mappings, vocab or default language + # + # @return [Boolean] + def empty? + @term_definitions.empty? && vocab.nil? && default_language.nil? end - if value.key?('@context') - begin - new_ctx = self.parse(value['@context'], - base: base, - override_protected: true, - remote_contexts: remote_contexts, - validate_scoped: false) - # Record null context in array form - definition.context = case value['@context'] - when String then new_ctx.context_base - when nil then [nil] - else value['@context'] - end - # log_debug("") {"context: #{definition.context.inspect}"} - rescue JsonLdError => e - raise JsonLdError::InvalidScopedContext, "Term definition for #{term.inspect} contains illegal value for @context: #{e.message}" + # @param [String] value must be an absolute IRI + def base=(value, **_options) + if value + unless value.is_a?(String) || value.is_a?(RDF::URI) + raise JsonLdError::InvalidBaseIRI, + "@base must be a string: #{value.inspect}" + end + + value = RDF::URI(value) + value = @base.join(value) if @base && value.relative? 
+ # still might be relative to document + @base = value + else + @base = false end end - if value.key?('@language') - language = value['@language'] - language = case value['@language'] + # @param [String] value + def default_language=(value, **options) + @default_language = case value when String # Warn on an invalid language tag, unless :validate is true, in which case it's an error - if value['@language'] !~ /^[a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})*$/ - warn "@language must be valid BCP47: #{value['@language'].inspect}" + unless /^[a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})*$/.match?(value) + warn "@language must be valid BCP47: #{value.inspect}" end - options[:lowercaseLanguage] ? value['@language'].downcase : value['@language'] + options[:lowercaseLanguage] ? value.downcase : value when nil nil else - raise JsonLdError::InvalidLanguageMapping, "language must be null or a string, was #{value['@language'].inspect}} on term #{term.inspect}" + raise JsonLdError::InvalidDefaultLanguage, "@language must be a string: #{value.inspect}" end - # log_debug("") {"language_mapping: #{language.inspect}"} - definition.language_mapping = language || false end - if value.key?('@direction') - direction = value['@direction'] - raise JsonLdError::InvalidBaseDirection, "direction must be null, 'ltr', or 'rtl', was #{language.inspect}} on term #{term.inspect}" unless direction.nil? 
|| %w(ltr rtl).include?(direction) - # log_debug("") {"direction_mapping: #{direction.inspect}"} - definition.direction_mapping = direction || false - end + # @param [String] value + def default_direction=(value, **_options) + @default_direction = if value + unless %w[ + ltr rtl + ].include?(value) + raise JsonLdError::InvalidBaseDirection, + "@direction must be one or 'ltr', or 'rtl': #{value.inspect}" + end - if value.key?('@nest') - nest = value['@nest'] - raise JsonLdError::InvalidNestValue, "nest must be a string, was #{nest.inspect}} on term #{term.inspect}" unless nest.is_a?(String) - raise JsonLdError::InvalidNestValue, "nest must not be a keyword other than @nest, was #{nest.inspect}} on term #{term.inspect}" if nest.match?(/^@[a-zA-Z]+$/) && nest != '@nest' - # log_debug("") {"nest: #{nest.inspect}"} - definition.nest = nest + value + end end - if value.key?('@prefix') - raise JsonLdError::InvalidTermDefinition, "@prefix used on compact or relative IRI term #{term.inspect}" if term.match?(%r{:|/}) - case pfx = value['@prefix'] - when TrueClass, FalseClass - definition.prefix = pfx + ## + # Retrieve, or check processing mode. + # + # * With no arguments, retrieves the current set processingMode. + # * With an argument, verifies that the processingMode is at least that provided, either as an integer, or a string of the form "json-ld-1.x" + # * If expecting 1.1, and not set, it has the side-effect of setting mode to json-ld-1.1. + # + # @param [String, Number] expected (nil) + # @return [String] + def processingMode(expected = nil) + case expected + when 1.0, 'json-ld-1.0' + @processingMode == 'json-ld-1.0' + when 1.1, 'json-ld-1.1' + @processingMode.nil? 
|| @processingMode == 'json-ld-1.1' + when nil + @processingMode || 'json-ld-1.1' else - raise JsonLdError::InvalidPrefixValue, "unknown value for '@prefix': #{pfx.inspect} on term #{term.inspect}" + false end - - raise JsonLdError::InvalidTermDefinition, "keywords may not be used as prefixes" if pfx && KEYWORDS.include?(definition.id.to_s) end - if previous_definition && previous_definition.protected? && definition != previous_definition && !override_protected - definition = previous_definition - raise JSON::LD::JsonLdError::ProtectedTermRedefinition, "Attempt to redefine protected term #{term}" - end + ## + # Set processing mode. + # + # * With an argument, verifies that the processingMode is at least that provided, either as an integer, or a string of the form "json-ld-1.x" + # + # If contex has a @version member, it's value MUST be 1.1, otherwise an "invalid @version value" has been detected, and processing is aborted. + # If processingMode has been set, and it is not "json-ld-1.1", a "processing mode conflict" has been detecting, and processing is aborted. + # + # @param [String, Number] value + # @return [String] + # @raise [JsonLdError::ProcessingModeConflict] + def processingMode=(value = nil, **_options) + value = "json-ld-1.1" if value == 1.1 + case value + when "json-ld-1.0", "json-ld-1.1" + if @processingMode && @processingMode != value + raise JsonLdError::ProcessingModeConflict, "#{value} not compatible with #{@processingMode}" + end - term_definitions[term] = definition - defined[term] = true - end + @processingMode = value + else + raise JsonLdError::InvalidVersionValue, value.inspect + end + end - ## - # Initial context, without mappings, vocab or default language - # - # @return [Boolean] - def empty? - @term_definitions.empty? && self.vocab.nil? && self.default_language.nil? 
- end + # If context has a @vocab member: if its value is not a valid absolute IRI or null trigger an INVALID_VOCAB_MAPPING error; otherwise set the active context's vocabulary mapping to its value and remove the @vocab member from context. + # @param [String] value must be an absolute IRI + def vocab=(value, **_options) + @vocab = case value + when /_:/ + # BNode vocab is deprecated + if @options[:validate] && processingMode("json-ld-1.1") + warn "[DEPRECATION] Blank Node vocabularies deprecated in JSON-LD 1.1." + end + value + when String, RDF::URI + if RDF::URI(value.to_s).relative? && processingMode("json-ld-1.0") + raise JsonLdError::InvalidVocabMapping, "@vocab must be an absolute IRI in 1.0 mode: #{value.inspect}" + end - # @param [String] value must be an absolute IRI - def base=(value, **options) - if value - raise JsonLdError::InvalidBaseIRI, "@base must be a string: #{value.inspect}" unless value.is_a?(String) || value.is_a?(RDF::URI) - value = RDF::URI(value) - value = @base.join(value) if @base && value.relative? - # still might be relative to document - @base = value - else - @base = false + expand_iri(value.to_s, vocab: true, documentRelative: true) + when nil + nil + else + raise JsonLdError::InvalidVocabMapping, "@vocab must be an IRI: #{value.inspect}" + end end - end + # Set propagation + # @note: by the time this is called, the work has already been done. 
+ # + # @param [Boolean] value + def propagate=(value, **_options) + if processingMode("json-ld-1.0") + raise JsonLdError::InvalidContextEntry, + "@propagate may only be set in 1.1 mode" + end - # @param [String] value - def default_language=(value, **options) - @default_language = case value - when String - # Warn on an invalid language tag, unless :validate is true, in which case it's an error - if value !~ /^[a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})*$/ - warn "@language must be valid BCP47: #{value.inspect}" + unless value.is_a?(TrueClass) || value.is_a?(FalseClass) + raise JsonLdError::InvalidPropagateValue, + "@propagate must be boolean valued: #{value.inspect}" end - options[:lowercaseLanguage] ? value.downcase : value - when nil - nil - else - raise JsonLdError::InvalidDefaultLanguage, "@language must be a string: #{value.inspect}" - end - end - # @param [String] value - def default_direction=(value, **options) - @default_direction = if value - raise JsonLdError::InvalidBaseDirection, "@direction must be one or 'ltr', or 'rtl': #{value.inspect}" unless %w(ltr rtl).include?(value) value - else - nil end - end - ## - # Retrieve, or check processing mode. - # - # * With no arguments, retrieves the current set processingMode. - # * With an argument, verifies that the processingMode is at least that provided, either as an integer, or a string of the form "json-ld-1.x" - # * If expecting 1.1, and not set, it has the side-effect of setting mode to json-ld-1.1. - # - # @param [String, Number] expected (nil) - # @return [String] - def processingMode(expected = nil) - case expected - when 1.0, 'json-ld-1.0' - @processingMode == 'json-ld-1.0' - when 1.1, 'json-ld-1.1' - @processingMode.nil? || @processingMode == 'json-ld-1.1' - when nil - @processingMode || 'json-ld-1.1' - else - false - end - end + ## + # Generate @context + # + # If a context was supplied in global options, use that, otherwise, generate one + # from this representation. 
+ # + # @param [Array, Hash, Context, IO, StringIO] provided_context (nil) + # Original context to use, if available + # @param [Hash{Symbol => Object}] options ({}) + # @return [Hash] + def serialize(provided_context: nil, **_options) + # log_debug("serlialize: generate context") + # log_debug("") {"=> context: #{inspect}"} + use_context = case provided_context + when String, RDF::URI + # log_debug "serlialize: reuse context: #{provided_context.inspect}" + provided_context.to_s + when Hash + # log_debug "serlialize: reuse context: #{provided_context.inspect}" + # If it has an @context entry use it, otherwise it is assumed to be the body of a context + provided_context.fetch('@context', provided_context) + when Array + # log_debug "serlialize: reuse context: #{provided_context.inspect}" + provided_context + when IO, StringIO + load_context(provided_context, **@options).fetch('@context', {}) + else + ctx = {} + ctx['@version'] = 1.1 if @processingMode == 'json-ld-1.1' + ctx['@base'] = base.to_s if base + ctx['@direction'] = default_direction.to_s if default_direction + ctx['@language'] = default_language.to_s if default_language + ctx['@vocab'] = vocab.to_s if vocab + + # Term Definitions + term_definitions.each do |term, defn| + ctx[term] = defn.to_context_definition(self) + end + ctx + end - ## - # Set processing mode. - # - # * With an argument, verifies that the processingMode is at least that provided, either as an integer, or a string of the form "json-ld-1.x" - # - # If contex has a @version member, it's value MUST be 1.1, otherwise an "invalid @version value" has been detected, and processing is aborted. - # If processingMode has been set, and it is not "json-ld-1.1", a "processing mode conflict" has been detecting, and processing is aborted. 
- # - # @param [String, Number] value - # @return [String] - # @raise [JsonLdError::ProcessingModeConflict] - def processingMode=(value = nil, **options) - value = "json-ld-1.1" if value == 1.1 - case value - when "json-ld-1.0", "json-ld-1.1" - if @processingMode && @processingMode != value - raise JsonLdError::ProcessingModeConflict, "#{value} not compatible with #{@processingMode}" - end - @processingMode = value - else - raise JsonLdError::InvalidVersionValue, value.inspect + # Return hash with @context, or empty + use_context.nil? || use_context.empty? ? {} : { '@context' => use_context } end - end - # If context has a @vocab member: if its value is not a valid absolute IRI or null trigger an INVALID_VOCAB_MAPPING error; otherwise set the active context's vocabulary mapping to its value and remove the @vocab member from context. - # @param [String] value must be an absolute IRI - def vocab=(value, **options) - @vocab = case value - when /_:/ - # BNode vocab is deprecated - warn "[DEPRECATION] Blank Node vocabularies deprecated in JSON-LD 1.1." if @options[:validate] && processingMode("json-ld-1.1") - value - when String, RDF::URI - if (RDF::URI(value.to_s).relative? && processingMode("json-ld-1.0")) - raise JsonLdError::InvalidVocabMapping, "@vocab must be an absolute IRI in 1.0 mode: #{value.inspect}" + ## + # Build a context from an RDF::Vocabulary definition. + # + # @example building from an external vocabulary definition + # + # g = RDF::Graph.load("http://schema.org/docs/schema_org_rdfa.html") + # + # context = JSON::LD::Context.new.from_vocabulary(g, + # vocab: "http://schema.org/", + # prefixes: {schema: "http://schema.org/"}, + # language: "en") + # + # @param [RDF::Queryable] graph + # + # @note requires rdf/vocab gem. 
+ # + # @return [self] + def from_vocabulary(graph) + require 'rdf/vocab' unless RDF.const_defined?(:Vocab) + statements = {} + ranges = {} + + # Add term definitions for each class and property not in schema:, and + # for those properties having an object range + graph.each do |statement| + next if statement.subject.node? + + (statements[statement.subject] ||= []) << statement + + # Keep track of predicate ranges + if [RDF::RDFS.range, RDF::Vocab::SCHEMA.rangeIncludes].include?(statement.predicate) + (ranges[statement.subject] ||= []) << statement.object + end end - expand_iri(value.to_s, vocab: true, documentRelative: true) - when nil - nil - else - raise JsonLdError::InvalidVocabMapping, "@vocab must be an IRI: #{value.inspect}" - end - end - # Set propagation - # @note: by the time this is called, the work has already been done. - # - # @param [Boolean] value - def propagate=(value, **options) - raise JsonLdError::InvalidContextEntry, "@propagate may only be set in 1.1 mode" if processingMode("json-ld-1.0") - raise JsonLdError::InvalidPropagateValue, "@propagate must be boolean valued: #{value.inspect}" unless value.is_a?(TrueClass) || value.is_a?(FalseClass) - value - end + # Add term definitions for each class and property not in vocab, and + # for those properties having an object range + statements.each do |subject, values| + types = values.each_with_object([]) { |v, memo| memo << v.object if v.predicate == RDF.type } + is_property = types.any? { |t| t.to_s.include?("Property") } + + term = subject.to_s.split(%r{[/\#]}).last + + if is_property + prop_ranges = ranges.fetch(subject, []) + # If any range is empty or member of range includes rdfs:Literal or schema:Text + next if (vocab && prop_ranges.empty?) 
|| + prop_ranges.include?(RDF::Vocab::SCHEMA.Text) || + prop_ranges.include?(RDF::RDFS.Literal) + + td = term_definitions[term] = TermDefinition.new(term, id: subject.to_s) + + # Set context typing based on first element in range + case r = prop_ranges.first + when RDF::XSD.string + td.language_mapping = false if default_language + # FIXME: text direction + when RDF::XSD.boolean, RDF::Vocab::SCHEMA.Boolean, RDF::XSD.date, RDF::Vocab::SCHEMA.Date, + RDF::XSD.dateTime, RDF::Vocab::SCHEMA.DateTime, RDF::XSD.time, RDF::Vocab::SCHEMA.Time, + RDF::XSD.duration, RDF::Vocab::SCHEMA.Duration, RDF::XSD.decimal, RDF::Vocab::SCHEMA.Number, + RDF::XSD.float, RDF::Vocab::SCHEMA.Float, RDF::XSD.integer, RDF::Vocab::SCHEMA.Integer + td.type_mapping = r + td.simple = false + else + # It's an object range (includes schema:URL) + td.type_mapping = '@id' + end + else + # Ignore if there's a default voabulary and this is not a property + next if vocab && subject.to_s.start_with?(vocab) - ## - # Generate @context - # - # If a context was supplied in global options, use that, otherwise, generate one - # from this representation. 
- # - # @param [Array, Hash, Context, IO, StringIO] provided_context (nil) - # Original context to use, if available - # @param [Hash{Symbol => Object}] options ({}) - # @return [Hash] - def serialize(provided_context: nil, **options) - # log_debug("serlialize: generate context") - # log_debug("") {"=> context: #{inspect}"} - use_context = case provided_context - when String, RDF::URI - # log_debug "serlialize: reuse context: #{provided_context.inspect}" - provided_context.to_s - when Hash - # log_debug "serlialize: reuse context: #{provided_context.inspect}" - # If it has an @context entry use it, otherwise it is assumed to be the body of a context - provided_context.fetch('@context', provided_context) - when Array - # log_debug "serlialize: reuse context: #{provided_context.inspect}" - provided_context - when IO, StringIO - load_context(provided_context, **@options).fetch('@context', {}) - else - ctx = {} - ctx['@version'] = 1.1 if @processingMode == 'json-ld-1.1' - ctx['@base'] = base.to_s if base - ctx['@direction'] = default_direction.to_s if default_direction - ctx['@language'] = default_language.to_s if default_language - ctx['@vocab'] = vocab.to_s if vocab - - # Term Definitions - term_definitions.each do |term, defn| - ctx[term] = defn.to_context_definition(self) + # otherwise, create a term definition + td = term_definitions[term] = TermDefinition.new(term, id: subject.to_s) + end end - ctx + + self end - # Return hash with @context, or empty - use_context.nil? || use_context.empty? ? 
{} : {'@context' => use_context} - end + # Set term mapping + # + # @param [#to_s] term + # @param [RDF::URI, String, nil] value + # + # @return [TermDefinition] + def set_mapping(term, value) + # log_debug("") {"map #{term.inspect} to #{value.inspect}"} + term = term.to_s + term_definitions[term] = + TermDefinition.new(term, id: value, simple: true, prefix: value.to_s.end_with?(*PREFIX_URI_ENDINGS)) + term_definitions[term].simple = true - ## - # Build a context from an RDF::Vocabulary definition. - # - # @example building from an external vocabulary definition - # - # g = RDF::Graph.load("http://schema.org/docs/schema_org_rdfa.html") - # - # context = JSON::LD::Context.new.from_vocabulary(g, - # vocab: "http://schema.org/", - # prefixes: {schema: "http://schema.org/"}, - # language: "en") - # - # @param [RDF::Queryable] graph - # - # @return [self] - def from_vocabulary(graph) - statements = {} - ranges = {} - - # Add term definitions for each class and property not in schema:, and - # for those properties having an object range - graph.each do |statement| - next if statement.subject.node? - (statements[statement.subject] ||= []) << statement - - # Keep track of predicate ranges - if [RDF::RDFS.range, RDF::SCHEMA.rangeIncludes].include?(statement.predicate) - (ranges[statement.subject] ||= []) << statement.object - end + term_sym = term.empty? ? "" : term.to_sym + iri_to_term.delete(term_definitions[term].id.to_s) if term_definitions[term].id.is_a?(String) + @options[:prefixes][term_sym] = value if @options.key?(:prefixes) + iri_to_term[value.to_s] = term + term_definitions[term] end - # Add term definitions for each class and property not in vocab, and - # for those properties having an object range - statements.each do |subject, values| - types = values.each_with_object([]) { |v, memo| memo << v.object if v.predicate == RDF.type } - is_property = types.any? 
{|t| t.to_s.include?("Property")} - - term = subject.to_s.split(/[\/\#]/).last + ## + # Find a term definition + # + # @param [Term, #to_s] term in unexpanded form + # @return [Term] + def find_definition(term) + term.is_a?(TermDefinition) ? term : term_definitions[term.to_s] + end - if !is_property - # Ignore if there's a default voabulary and this is not a property - next if vocab && subject.to_s.start_with?(vocab) + ## + # Retrieve container mapping, add it if `value` is provided + # + # @param [Term, #to_s] term in unexpanded form + # @return [Array<'@index', '@language', '@index', '@set', '@type', '@id', '@graph'>] + def container(term) + return Set[term] if term == '@list' - # otherwise, create a term definition - td = term_definitions[term] = TermDefinition.new(term, id: subject.to_s) - else - prop_ranges = ranges.fetch(subject, []) - # If any range is empty or member of range includes rdfs:Literal or schema:Text - next if vocab && prop_ranges.empty? || - prop_ranges.include?(RDF::SCHEMA.Text) || - prop_ranges.include?(RDF::RDFS.Literal) - td = term_definitions[term] = TermDefinition.new(term, id: subject.to_s) - - # Set context typing based on first element in range - case r = prop_ranges.first - when RDF::XSD.string - if self.default_language - td.language_mapping = false - end - # FIXME: text direction - when RDF::XSD.boolean, RDF::SCHEMA.Boolean, RDF::XSD.date, RDF::SCHEMA.Date, - RDF::XSD.dateTime, RDF::SCHEMA.DateTime, RDF::XSD.time, RDF::SCHEMA.Time, - RDF::XSD.duration, RDF::SCHEMA.Duration, RDF::XSD.decimal, RDF::SCHEMA.Number, - RDF::XSD.float, RDF::SCHEMA.Float, RDF::XSD.integer, RDF::SCHEMA.Integer - td.type_mapping = r - td.simple = false - else - # It's an object range (includes schema:URL) - td.type_mapping = '@id' - end - end + term = find_definition(term) + term ? 
term.container_mapping : Set.new end - self - end - - # Set term mapping - # - # @param [#to_s] term - # @param [RDF::URI, String, nil] value - # - # @return [TermDefinition] - def set_mapping(term, value) - # log_debug("") {"map #{term.inspect} to #{value.inspect}"} - term = term.to_s - term_definitions[term] = TermDefinition.new(term, id: value, simple: true, prefix: (value.to_s.end_with?(*PREFIX_URI_ENDINGS))) - term_definitions[term].simple = true - - term_sym = term.empty? ? "" : term.to_sym - iri_to_term.delete(term_definitions[term].id.to_s) if term_definitions[term].id.is_a?(String) - @options[:prefixes][term_sym] = value if @options.key?(:prefixes) - iri_to_term[value.to_s] = term - term_definitions[term] - end + ## + # Retrieve term coercion + # + # @param [Term, #to_s] term in unexpanded form + # @return [RDF::URI, '@id'] + def coerce(term) + # Map property, if it's not an RDF::Value + # @type is always is an IRI + return '@id' if term == RDF.type || term == '@type' - ## - # Find a term definition - # - # @param [Term, #to_s] term in unexpanded form - # @return [Term] - def find_definition(term) - term.is_a?(TermDefinition) ? term : term_definitions[term.to_s] - end + term = find_definition(term) + term&.type_mapping + end - ## - # Retrieve container mapping, add it if `value` is provided - # - # @param [Term, #to_s] term in unexpanded form - # @return [Array<'@index', '@language', '@index', '@set', '@type', '@id', '@graph'>] - def container(term) - return Set[term] if term == '@list' - term = find_definition(term) - term ? term.container_mapping : Set.new - end + ## + # Should values be represented using an array? 
+ # + # @param [Term, #to_s] term in unexpanded form + # @return [Boolean] + def as_array?(term) + return true if CONTEXT_CONTAINER_ARRAY_TERMS.include?(term) - ## - # Retrieve term coercion - # - # @param [Term, #to_s] term in unexpanded form - # @return [RDF::URI, '@id'] - def coerce(term) - # Map property, if it's not an RDF::Value - # @type is always is an IRI - return '@id' if term == RDF.type || term == '@type' - term = find_definition(term) - term && term.type_mapping - end + term = find_definition(term) + term && (term.as_set? || term.container_mapping.include?('@list')) + end - ## - # Should values be represented using an array? - # - # @param [Term, #to_s] term in unexpanded form - # @return [Boolean] - def as_array?(term) - return true if CONTEXT_CONTAINER_ARRAY_TERMS.include?(term) - term = find_definition(term) - term && (term.as_set? || term.container_mapping.include?('@list')) - end + ## + # Retrieve content of a term + # + # @param [Term, #to_s] term in unexpanded form + # @return [Hash] + def content(term) + term = find_definition(term) + term&.content + end - ## - # Retrieve content of a term - # - # @param [Term, #to_s] term in unexpanded form - # @return [Hash] - def content(term) - term = find_definition(term) - term && term.content - end + ## + # Retrieve nest of a term. + # value of nest must be @nest or a term that resolves to @nest + # + # @param [Term, #to_s] term in unexpanded form + # @return [String] Nesting term + # @raise JsonLdError::InvalidNestValue if nesting term exists and is not a term resolving to `@nest` in the current context. + def nest(term) + term = find_definition(term) + return unless term - ## - # Retrieve nest of a term. - # value of nest must be @nest or a term that resolves to @nest - # - # @param [Term, #to_s] term in unexpanded form - # @return [String] Nesting term - # @raise JsonLdError::InvalidNestValue if nesting term exists and is not a term resolving to `@nest` in the current context. 
- def nest(term) - term = find_definition(term) - if term case term.nest when '@nest', nil - term.nest else - nest_term = find_definition(term.nest) - raise JsonLdError::InvalidNestValue, "nest must a term resolving to @nest, was #{nest_term.inspect}" unless nest_term && nest_term.id == '@nest' - term.nest + nest_term = find_definition(term.nest) + unless nest_term && nest_term.id == '@nest' + raise JsonLdError::InvalidNestValue, + "nest must a term resolving to @nest, was #{nest_term.inspect}" + end + + end + term.nest + end + + ## + # Retrieve the language associated with a term, or the default language otherwise + # @param [Term, #to_s] term in unexpanded form + # @return [String] + def language(term) + term = find_definition(term) + lang = term&.language_mapping + if lang.nil? + @default_language + else + (lang == false ? nil : lang) end end - end - - ## - # Retrieve the language associated with a term, or the default language otherwise - # @param [Term, #to_s] term in unexpanded form - # @return [String] - def language(term) - term = find_definition(term) - lang = term && term.language_mapping - lang.nil? ? @default_language : (lang == false ? nil : lang) - end - - ## - # Retrieve the text direction associated with a term, or the default direction otherwise - # @param [Term, #to_s] term in unexpanded form - # @return [String] - def direction(term) - term = find_definition(term) - dir = term && term.direction_mapping - dir.nil? ? @default_direction : (dir == false ? nil : dir) - end - ## - # Is this a reverse term - # @param [Term, #to_s] term in unexpanded form - # @return [Boolean] - def reverse?(term) - term = find_definition(term) - term && term.reverse_property - end - - ## - # Given a term or IRI, find a reverse term definition matching that term. If the term is already reversed, find a non-reversed version. 
- # - # @param [Term, #to_s] term - # @return [Term] related term definition - def reverse_term(term) - # Direct lookup of term - term = term_definitions[term.to_s] if term_definitions.key?(term.to_s) && !term.is_a?(TermDefinition) - - # Lookup term, assuming term is an IRI - unless term.is_a?(TermDefinition) - td = term_definitions.values.detect {|t| t.id == term.to_s} - - # Otherwise create a temporary term definition - term = td || TermDefinition.new(term.to_s, id: expand_iri(term, vocab:true)) + ## + # Retrieve the text direction associated with a term, or the default direction otherwise + # @param [Term, #to_s] term in unexpanded form + # @return [String] + def direction(term) + term = find_definition(term) + dir = term&.direction_mapping + if dir.nil? + @default_direction + else + (dir == false ? nil : dir) + end end - # Now, return a term, which reverses this term - term_definitions.values.detect {|t| t.id == term.id && t.reverse_property != term.reverse_property} - end - - ## - # Expand an IRI. Relative IRIs are expanded against any document base. - # - # @param [String] value - # A keyword, term, prefix:suffix or possibly relative IRI - # @param [Boolean] as_string (false) transform RDF::Resource values to string - # @param [String, RDF::URI] base for resolving document-relative IRIs - # @param [Hash] defined - # Used during Context Processing. - # @param [Boolean] documentRelative (false) - # @param [Hash] local_context - # Used during Context Processing. - # @param [Boolean] vocab (false) - # @param [Hash{Symbol => Object}] options - # @return [RDF::Resource, String] - # IRI or String, if it's a keyword - # @raise [JSON::LD::JsonLdError::InvalidIRIMapping] if the value cannot be expanded - # @see https://www.w3.org/TR/json-ld11-api/#iri-expansion - def expand_iri(value, - as_string: false, - base: nil, - defined: nil, - documentRelative: false, - local_context: nil, - vocab: false, - **options) - return (value && as_string ? 
value.to_s : value) unless value.is_a?(String) - - return value if KEYWORDS.include?(value) - return nil if value.match?(/^@[a-zA-Z]+$/) - - defined = defined || {} # if we initialized in the keyword arg we would allocate {} at each invokation, even in the 2 (common) early returns above. - - # If local context is not null, it contains a key that equals value, and the value associated with the key that equals value in defined is not true, then invoke the Create Term Definition subalgorithm, passing active context, local context, value as term, and defined. This will ensure that a term definition is created for value in active context during Context Processing. - if local_context && local_context.key?(value) && !defined[value] - create_term_definition(local_context, value, defined) + ## + # Is this a reverse term + # @param [Term, #to_s] term in unexpanded form + # @return [Boolean] + def reverse?(term) + term = find_definition(term) + term&.reverse_property end - if (v_td = term_definitions[value]) && KEYWORDS.include?(v_td.id) - return (as_string ? v_td.id.to_s : v_td.id) - end + ## + # Given a term or IRI, find a reverse term definition matching that term. If the term is already reversed, find a non-reversed version. + # + # @param [Term, #to_s] term + # @return [Term] related term definition + def reverse_term(term) + # Direct lookup of term + term = term_definitions[term.to_s] if term_definitions.key?(term.to_s) && !term.is_a?(TermDefinition) + + # Lookup term, assuming term is an IRI + unless term.is_a?(TermDefinition) + td = term_definitions.values.detect { |t| t.id == term.to_s } + + # Otherwise create a temporary term definition + term = td || TermDefinition.new(term.to_s, id: expand_iri(term, vocab: true)) + end - # If active context has a term definition for value, and the associated mapping is a keyword, return that keyword. - # If vocab is true and the active context has a term definition for value, return the associated IRI mapping. 
- if (v_td = term_definitions[value]) && (vocab || KEYWORDS.include?(v_td.id)) - iri = base && v_td.id ? base.join(v_td.id) : v_td.id # vocab might be doc relative - return (as_string ? iri.to_s : iri) + # Now, return a term, which reverses this term + term_definitions.values.detect { |t| t.id == term.id && t.reverse_property != term.reverse_property } end - # If value contains a colon (:), it is either an absolute IRI or a compact IRI: - if value[1..-1].to_s.include?(':') - prefix, suffix = value.split(':', 2) - - # If prefix is underscore (_) or suffix begins with double-forward-slash (//), return value as it is already an absolute IRI or a blank node identifier. - if prefix == '_' - v = RDF::Node.new(namer.get_sym(suffix)) - return (as_string ? v.to_s : v) - end - if suffix.start_with?('//') - v = RDF::URI(value) - return (as_string ? v.to_s : v) + ## + # Expand an IRI. Relative IRIs are expanded against any document base. + # + # @param [String] value + # A keyword, term, prefix:suffix or possibly relative IRI + # @param [Boolean] as_string (false) transform RDF::Resource values to string + # @param [String, RDF::URI] base for resolving document-relative IRIs + # @param [Hash] defined + # Used during Context Processing. + # @param [Boolean] documentRelative (false) + # @param [Hash] local_context + # Used during Context Processing. + # @param [Boolean] vocab (false) + # @param [Hash{Symbol => Object}] options + # @return [RDF::Resource, String] + # IRI or String, if it's a keyword + # @raise [JSON::LD::JsonLdError::InvalidIRIMapping] if the value cannot be expanded + # @see https://www.w3.org/TR/json-ld11-api/#iri-expansion + def expand_iri(value, + as_string: false, + base: nil, + defined: nil, + documentRelative: false, + local_context: nil, + vocab: false, + **_options) + return (value && as_string ? 
value.to_s : value) unless value.is_a?(String) + + return value if KEYWORDS.include?(value) + return nil if value.match?(/^@[a-zA-Z]+$/) + + defined ||= {} # if we initialized in the keyword arg we would allocate {} at each invokation, even in the 2 (common) early returns above. + + # If local context is not null, it contains a key that equals value, and the value associated with the key that equals value in defined is not true, then invoke the Create Term Definition subalgorithm, passing active context, local context, value as term, and defined. This will ensure that a term definition is created for value in active context during Context Processing. + create_term_definition(local_context, value, defined) if local_context&.key?(value) && !defined[value] + + if (v_td = term_definitions[value]) && KEYWORDS.include?(v_td.id) + return (as_string ? v_td.id.to_s : v_td.id) end - # If local context is not null, it contains a key that equals prefix, and the value associated with the key that equals prefix in defined is not true, invoke the Create Term Definition algorithm, passing active context, local context, prefix as term, and defined. This will ensure that a term definition is created for prefix in active context during Context Processing. - if local_context && local_context.key?(prefix) && !defined[prefix] - create_term_definition(local_context, prefix, defined) + # If active context has a term definition for value, and the associated mapping is a keyword, return that keyword. + # If vocab is true and the active context has a term definition for value, return the associated IRI mapping. + if (v_td = term_definitions[value]) && (vocab || KEYWORDS.include?(v_td.id)) + iri = base && v_td.id ? base.join(v_td.id) : v_td.id # vocab might be doc relative + return (as_string ? iri.to_s : iri) end - # If active context contains a term definition for prefix, return the result of concatenating the IRI mapping associated with prefix and suffix. 
- if (td = term_definitions[prefix]) && !td.id.nil? && td.prefix? - return (as_string ? td.id.to_s : td.id) + suffix - elsif RDF::URI(value).absolute? - # Otherwise, if the value has the form of an absolute IRI, return it - return (as_string ? value.to_s : RDF::URI(value)) - else - # Otherwise, it is a relative IRI + # If value contains a colon (:), it is either an absolute IRI or a compact IRI: + if value[1..].to_s.include?(':') + prefix, suffix = value.split(':', 2) + + # If prefix is underscore (_) or suffix begins with double-forward-slash (//), return value as it is already an absolute IRI or a blank node identifier. + if prefix == '_' + v = RDF::Node.new(namer.get_sym(suffix)) + return (as_string ? v.to_s : v) + end + if suffix.start_with?('//') + v = RDF::URI(value) + return (as_string ? v.to_s : v) + end + + # If local context is not null, it contains a key that equals prefix, and the value associated with the key that equals prefix in defined is not true, invoke the Create Term Definition algorithm, passing active context, local context, prefix as term, and defined. This will ensure that a term definition is created for prefix in active context during Context Processing. + create_term_definition(local_context, prefix, defined) if local_context&.key?(prefix) && !defined[prefix] + + # If active context contains a term definition for prefix, return the result of concatenating the IRI mapping associated with prefix and suffix. + if (td = term_definitions[prefix]) && !td.id.nil? && td.prefix? + return (as_string ? td.id.to_s : td.id) + suffix + elsif RDF::URI(value).absolute? + # Otherwise, if the value has the form of an absolute IRI, return it + return (as_string ? value.to_s : RDF::URI(value)) + end end - end - iri = value.is_a?(RDF::URI) ? value : RDF::URI(value) - result = if vocab && self.vocab - # If vocab is true, and active context has a vocabulary mapping, return the result of concatenating the vocabulary mapping with value. 
- # Note that @vocab could still be relative to a document base - (base && self.vocab.is_a?(RDF::URI) && self.vocab.relative? ? base.join(self.vocab) : self.vocab) + value - elsif documentRelative - if iri.absolute? - iri - elsif self.base.is_a?(RDF::URI) && self.base.absolute? - self.base.join(iri) - elsif self.base == false - # No resollution of `@base: null` - iri - elsif base && self.base - base.join(self.base).join(iri) - elsif base - base.join(iri) + iri = value.is_a?(RDF::URI) ? value : RDF::URI(value) + result = if vocab && self.vocab + # If vocab is true, and active context has a vocabulary mapping, return the result of concatenating the vocabulary mapping with value. + # Note that @vocab could still be relative to a document base + (base && self.vocab.is_a?(RDF::URI) && self.vocab.relative? ? base.join(self.vocab) : self.vocab) + value + elsif documentRelative + if iri.absolute? + iri + elsif self.base.is_a?(RDF::URI) && self.base.absolute? + self.base.join(iri) + elsif self.base == false + # No resollution of `@base: null` + iri + elsif base && self.base + base.join(self.base).join(iri) + elsif base + base.join(iri) + else + # Returns a relative IRI in an odd case. + iri + end + elsif local_context && iri.relative? + # If local context is not null and value is not an absolute IRI, an invalid IRI mapping error has been detected and processing is aborted. + raise JSON::LD::JsonLdError::InvalidIRIMapping, "not an absolute IRI: #{value}" else - # Returns a relative IRI in an odd case. iri end - elsif local_context && iri.relative? - # If local context is not null and value is not an absolute IRI, an invalid IRI mapping error has been detected and processing is aborted. - raise JSON::LD::JsonLdError::InvalidIRIMapping, "not an absolute IRI: #{value}" - else - iri + result && as_string ? result.to_s : result end - result && as_string ? 
result.to_s : result - end - # The following constants are used to reduce object allocations in #compact_iri below - CONTAINERS_GRAPH = %w(@graph@id @graph@id@set).freeze - CONTAINERS_GRAPH_INDEX = %w(@graph@index @graph@index@set).freeze - CONTAINERS_GRAPH_INDEX_INDEX = %w(@graph@index @graph@index@set @index @index@set).freeze - CONTAINERS_GRAPH_SET = %w(@graph @graph@set @set).freeze - CONTAINERS_ID_TYPE = %w(@id @id@set @type @set@type).freeze - CONTAINERS_ID_VOCAB = %w(@id @vocab @none).freeze - CONTAINERS_INDEX_SET = %w(@index @index@set).freeze - CONTAINERS_LANGUAGE = %w(@language @language@set).freeze - CONTAINERS_VALUE = %w(@value).freeze - CONTAINERS_VOCAB_ID = %w(@vocab @id @none).freeze - - ## - # Compacts an absolute IRI to the shortest matching term or compact IRI - # - # @param [RDF::URI] iri - # @param [String, RDF::URI] base for resolving document-relative IRIs - # @param [Object] value - # Value, used to select among various maps for the same IRI - # @param [Boolean] reverse - # specifies whether a reverse property is being compacted - # @param [Boolean] vocab - # specifies whether the passed iri should be compacted using the active context's vocabulary mapping - # - # @return [String] compacted form of IRI - # @see https://www.w3.org/TR/json-ld11-api/#iri-compaction - def compact_iri(iri, base: nil, reverse: false, value: nil, vocab: nil) - return if iri.nil? - iri = iri.to_s - - if vocab && inverse_context.key?(iri) - default_language = if self.default_direction - "#{self.default_language}_#{self.default_direction}".downcase - else - (self.default_language || "@none").downcase - end - containers = [] - tl, tl_value = "@language", "@null" - containers.concat(CONTAINERS_INDEX_SET) if index?(value) && !graph?(value) - - # If the value is a JSON Object with the key @preserve, use the value of @preserve. 
- value = value['@preserve'].first if value.is_a?(Hash) && value.key?('@preserve') - - if reverse - tl, tl_value = "@type", "@reverse" - containers << '@set' - elsif list?(value) - # if value is a list object, then set type/language and type/language value to the most specific values that work for all items in the list as follows: - containers << "@list" unless index?(value) - list = value['@list'] - common_type = nil - common_language = default_language if list.empty? - list.each do |item| - item_language, item_type = "@none", "@none" - if value?(item) - if item.key?('@direction') - item_language = "#{item['@language']}_#{item['@direction']}".downcase - elsif item.key?('@language') - item_language = item['@language'].downcase - elsif item.key?('@type') - item_type = item['@type'] + # The following constants are used to reduce object allocations in #compact_iri below + CONTAINERS_GRAPH = %w[@graph@id @graph@id@set].freeze + CONTAINERS_GRAPH_INDEX = %w[@graph@index @graph@index@set].freeze + CONTAINERS_GRAPH_INDEX_INDEX = %w[@graph@index @graph@index@set @index @index@set].freeze + CONTAINERS_GRAPH_SET = %w[@graph @graph@set @set].freeze + CONTAINERS_ID_TYPE = %w[@id @id@set @type @set@type].freeze + CONTAINERS_ID_VOCAB = %w[@id @vocab @none].freeze + CONTAINERS_INDEX_SET = %w[@index @index@set].freeze + CONTAINERS_LANGUAGE = %w[@language @language@set].freeze + CONTAINERS_VALUE = %w[@value].freeze + CONTAINERS_VOCAB_ID = %w[@vocab @id @none].freeze + + ## + # Compacts an absolute IRI to the shortest matching term or compact IRI + # + # @param [RDF::URI] iri + # @param [String, RDF::URI] base for resolving document-relative IRIs + # @param [Object] value + # Value, used to select among various maps for the same IRI + # @param [Boolean] reverse + # specifies whether a reverse property is being compacted + # @param [Boolean] vocab + # specifies whether the passed iri should be compacted using the active context's vocabulary mapping + # + # @return [String] compacted 
form of IRI + # @see https://www.w3.org/TR/json-ld11-api/#iri-compaction + def compact_iri(iri, base: nil, reverse: false, value: nil, vocab: nil) + return if iri.nil? + + iri = iri.to_s + + if vocab && inverse_context.key?(iri) + default_language = if default_direction + "#{self.default_language}_#{default_direction}".downcase + else + (self.default_language || "@none").downcase + end + containers = [] + tl = "@language" + tl_value = "@null" + containers.concat(CONTAINERS_INDEX_SET) if index?(value) && !graph?(value) + + # If the value is a JSON Object with the key @preserve, use the value of @preserve. + value = value['@preserve'].first if value.is_a?(Hash) && value.key?('@preserve') + + if reverse + tl = "@type" + tl_value = "@reverse" + containers << '@set' + elsif list?(value) + # if value is a list object, then set type/language and type/language value to the most specific values that work for all items in the list as follows: + containers << "@list" unless index?(value) + list = value['@list'] + common_type = nil + common_language = default_language if list.empty? 
+ list.each do |item| + item_language = "@none" + item_type = "@none" + if value?(item) + if item.key?('@direction') + item_language = "#{item['@language']}_#{item['@direction']}".downcase + elsif item.key?('@language') + item_language = item['@language'].downcase + elsif item.key?('@type') + item_type = item['@type'] + else + item_language = "@null" + end else - item_language = "@null" + item_type = '@id' end - else - item_type = '@id' - end - common_language ||= item_language - if item_language != common_language && value?(item) - common_language = '@none' + common_language ||= item_language + common_language = '@none' if item_language != common_language && value?(item) + common_type ||= item_type + common_type = '@none' if item_type != common_type end - common_type ||= item_type - if item_type != common_type - common_type = '@none' - end - end - common_language ||= '@none' - common_type ||= '@none' - if common_type != '@none' - tl, tl_value = '@type', common_type + common_language ||= '@none' + common_type ||= '@none' + if common_type == '@none' + tl_value = common_language + else + tl = '@type' + tl_value = common_type + end + elsif graph?(value) + # Prefer @index and @id containers, then @graph, then @index + containers.concat(CONTAINERS_GRAPH_INDEX_INDEX) if index?(value) + containers.concat(CONTAINERS_GRAPH) if value.key?('@id') + + # Prefer an @graph container next + containers.concat(CONTAINERS_GRAPH_SET) + + # Lastly, in 1.1, any graph can be indexed on @index or @id, so add if we haven't already + containers.concat(CONTAINERS_GRAPH_INDEX) unless index?(value) + containers.concat(CONTAINERS_GRAPH) unless value.key?('@id') + containers.concat(CONTAINERS_INDEX_SET) unless index?(value) + containers << '@set' + + tl = '@type' + tl_value = '@id' else - tl_value = common_language + if value?(value) + # In 1.1, an language map can be used to index values using @none + if value.key?('@language') && !index?(value) + tl_value = value['@language'].downcase + 
tl_value += "_#{value['@direction']}" if value['@direction'] + containers.concat(CONTAINERS_LANGUAGE) + elsif value.key?('@direction') && !index?(value) + tl_value = "_#{value['@direction']}" + elsif value.key?('@type') + tl_value = value['@type'] + tl = '@type' + end + else + # In 1.1, an id or type map can be used to index values using @none + containers.concat(CONTAINERS_ID_TYPE) + tl = '@type' + tl_value = '@id' + end + containers << '@set' end - elsif graph?(value) - # Prefer @index and @id containers, then @graph, then @index - containers.concat(CONTAINERS_GRAPH_INDEX_INDEX) if index?(value) - containers.concat(CONTAINERS_GRAPH) if value.key?('@id') - # Prefer an @graph container next - containers.concat(CONTAINERS_GRAPH_SET) + containers << '@none' - # Lastly, in 1.1, any graph can be indexed on @index or @id, so add if we haven't already - containers.concat(CONTAINERS_GRAPH_INDEX) unless index?(value) - containers.concat(CONTAINERS_GRAPH) unless value.key?('@id') + # In 1.1, an index map can be used to index values using @none, so add as a low priority containers.concat(CONTAINERS_INDEX_SET) unless index?(value) - containers << '@set' - - tl, tl_value = '@type', '@id' - else - if value?(value) - # In 1.1, an language map can be used to index values using @none - if value.key?('@language') && !index?(value) - tl_value = value['@language'].downcase - tl_value += "_#{value['@direction']}" if value['@direction'] - containers.concat(CONTAINERS_LANGUAGE) - elsif value.key?('@direction') && !index?(value) - tl_value = "_#{value['@direction']}" - elsif value.key?('@type') - tl_value = value['@type'] - tl = '@type' + # Values without type or language can use @language map + containers.concat(CONTAINERS_LANGUAGE) if value?(value) && value.keys == CONTAINERS_VALUE + + tl_value ||= '@null' + preferred_values = [] + preferred_values << '@reverse' if tl_value == '@reverse' + if ['@id', '@reverse'].include?(tl_value) && value.is_a?(Hash) && value.key?('@id') + t_iri = 
compact_iri(value['@id'], vocab: true, base: base) + if (r_td = term_definitions[t_iri]) && r_td.id == value['@id'] + preferred_values.concat(CONTAINERS_VOCAB_ID) + else + preferred_values.concat(CONTAINERS_ID_VOCAB) end else - # In 1.1, an id or type map can be used to index values using @none - containers.concat(CONTAINERS_ID_TYPE) - tl, tl_value = '@type', '@id' + tl = '@any' if list?(value) && value['@list'].empty? + preferred_values.concat([tl_value, '@none'].compact) end - containers << '@set' - end - - containers << '@none' - - # In 1.1, an index map can be used to index values using @none, so add as a low priority - containers.concat(CONTAINERS_INDEX_SET) unless index?(value) - # Values without type or language can use @language map - containers.concat(CONTAINERS_LANGUAGE) if value?(value) && value.keys == CONTAINERS_VALUE + preferred_values << '@any' - tl_value ||= '@null' - preferred_values = [] - preferred_values << '@reverse' if tl_value == '@reverse' - if (tl_value == '@id' || tl_value == '@reverse') && value.is_a?(Hash) && value.key?('@id') - t_iri = compact_iri(value['@id'], vocab: true, base: base) - if (r_td = term_definitions[t_iri]) && r_td.id == value['@id'] - preferred_values.concat(CONTAINERS_VOCAB_ID) - else - preferred_values.concat(CONTAINERS_ID_VOCAB) + # if containers included `@language` and preferred_values includes something of the form language-tag_direction, add just the _direction part, to select terms that have that direction. + if (lang_dir = preferred_values.detect { |v| v.include?('_') }) + preferred_values << ('_' + lang_dir.split('_').last) end - else - tl = '@any' if list?(value) && value['@list'].empty? - preferred_values.concat([tl_value, '@none'].compact) - end - preferred_values << '@any' - # if containers included `@language` and preferred_values includes something of the form language-tag_direction, add just the _direction part, to select terms that have that direction. 
- if lang_dir = preferred_values.detect {|v| v.include?('_')} - preferred_values << '_' + lang_dir.split('_').last + if (p_term = select_term(iri, containers, tl, preferred_values)) + return p_term + end end - if p_term = select_term(iri, containers, tl, preferred_values) - return p_term + # At this point, there is no simple term that iri can be compacted to. If vocab is true and active context has a vocabulary mapping: + if vocab && self.vocab && iri.start_with?(self.vocab) && iri.length > self.vocab.length + suffix = iri[self.vocab.length..] + return suffix unless term_definitions.key?(suffix) end - end - # At this point, there is no simple term that iri can be compacted to. If vocab is true and active context has a vocabulary mapping: - if vocab && self.vocab && iri.start_with?(self.vocab) && iri.length > self.vocab.length - suffix = iri[self.vocab.length..-1] - return suffix unless term_definitions.key?(suffix) - end + # The iri could not be compacted using the active context's vocabulary mapping. Try to create a compact IRI, starting by initializing compact IRI to null. This variable will be used to tore the created compact IRI, if any. + candidates = [] - # The iri could not be compacted using the active context's vocabulary mapping. Try to create a compact IRI, starting by initializing compact IRI to null. This variable will be used to tore the created compact IRI, if any. - candidates = [] + term_definitions.each do |term, td| + # Skip term if `@prefix` is not true in term definition + next unless td&.prefix? - term_definitions.each do |term, td| - next if td.nil? || td.id.nil? || td.id == iri || !iri.start_with?(td.id) + next if td&.id.nil? || td.id == iri || !td.match_iri?(iri) - # Skip term if `@prefix` is not true in term definition - next unless td.prefix? + suffix = iri[td.id.length..] 
+ ciri = "#{term}:#{suffix}" + candidates << ciri unless value && term_definitions.key?(ciri) + end - suffix = iri[td.id.length..-1] - ciri = "#{term}:#{suffix}" - candidates << ciri unless value && term_definitions.key?(ciri) - end + return candidates.min unless candidates.empty? + + # If we still don't have any terms and we're using standard_prefixes, + # try those, and add to mapping + if @options[:standard_prefixes] + candidates = RDF::Vocabulary + .select { |v| iri.start_with?(v.to_uri.to_s) && iri != v.to_uri.to_s } + .map do |v| + prefix = v.__name__.to_s.split('::').last.downcase + set_mapping(prefix, v.to_uri.to_s) + iri.sub(v.to_uri.to_s, "#{prefix}:").sub(/:$/, '') + end - return candidates.sort.first if !candidates.empty? + return candidates.min unless candidates.empty? + end - # If we still don't have any terms and we're using standard_prefixes, - # try those, and add to mapping - if @options[:standard_prefixes] - candidates = RDF::Vocabulary. - select {|v| iri.start_with?(v.to_uri.to_s) && iri != v.to_uri.to_s}. - map do |v| - prefix = v.__name__.to_s.split('::').last.downcase - set_mapping(prefix, v.to_uri.to_s) - iri.sub(v.to_uri.to_s, "#{prefix}:").sub(/:$/, '') - end + # If iri could be confused with a compact IRI using a term in this context, signal an error + term_definitions.each do |term, td| + next unless td.prefix? && td.match_compact_iri?(iri) - return candidates.sort.first if !candidates.empty? - end + raise JSON::LD::JsonLdError::IRIConfusedWithPrefix, "Absolute IRI '#{iri}' confused with prefix '#{term}'" + end - # If iri could be confused with a compact IRI using a term in this context, signal an error - term_definitions.each do |term, td| - next unless iri.to_s.start_with?("#{term}:") && td.prefix? 
- raise JSON::LD::JsonLdError:: IRIConfusedWithPrefix, "Absolute IRI '#{iri}' confused with prefix '#{term}'" - end + return iri if vocab - if !vocab # transform iri to a relative IRI using the document's base IRI iri = remove_base(self.base || base, iri) # Make . relative if it has the form of a keyword. iri = "./#{iri}" if iri.match?(/^@[a-zA-Z]+$/) - return iri - else - return iri - end - end - ## - # If active property has a type mapping in the active context set to @id or @vocab, a JSON object with a single member @id whose value is the result of using the IRI Expansion algorithm on value is returned. - # - # Otherwise, the result will be a JSON object containing an @value member whose value is the passed value. Additionally, an @type member will be included if there is a type mapping associated with the active property or an @language member if value is a string and there is language mapping associated with the active property. - # - # @param [String] property - # Associated property used to find coercion rules - # @param [Hash, String] value - # Value (literal or IRI) to be expanded - # @param [Boolean] useNativeTypes (false) use native representations - # @param [Boolean] rdfDirection (nil) decode i18n datatype if i18n-datatype - # @param [String, RDF::URI] base for resolving document-relative IRIs - # @param [Hash{Symbol => Object}] options - # - # @return [Hash] Object representation of value - # @raise [RDF::ReaderError] if the iri cannot be expanded - # @see https://www.w3.org/TR/json-ld11-api/#value-expansion - def expand_value(property, value, useNativeTypes: false, rdfDirection: nil, base: nil, **options) - td = term_definitions.fetch(property, TermDefinition.new(property)) - - # If the active property has a type mapping in active context that is @id, return a new JSON object containing a single key-value pair where the key is @id and the value is the result of using the IRI Expansion algorithm, passing active context, value, and true for document 
relative. - if value.is_a?(String) && td.type_mapping == '@id' - # log_debug("") {"as relative IRI: #{value.inspect}"} - return {'@id' => expand_iri(value, documentRelative: true, base: base).to_s} + iri end - # If active property has a type mapping in active context that is @vocab, return a new JSON object containing a single key-value pair where the key is @id and the value is the result of using the IRI Expansion algorithm, passing active context, value, true for vocab, and true for document relative. - if value.is_a?(String) && td.type_mapping == '@vocab' - return {'@id' => expand_iri(value, vocab: true, documentRelative: true, base: base).to_s} - end + ## + # If active property has a type mapping in the active context set to @id or @vocab, a JSON object with a single member @id whose value is the result of using the IRI Expansion algorithm on value is returned. + # + # Otherwise, the result will be a JSON object containing an @value member whose value is the passed value. Additionally, an @type member will be included if there is a type mapping associated with the active property or an @language member if value is a string and there is language mapping associated with the active property. 
+ # + # @param [String] property + # Associated property used to find coercion rules + # @param [Hash, String] value + # Value (literal or IRI) to be expanded + # @param [Boolean] useNativeTypes (false) use native representations + # @param [Boolean] rdfDirection (nil) decode i18n datatype if i18n-datatype + # @param [String, RDF::URI] base for resolving document-relative IRIs + # @param [Hash{Symbol => Object}] options + # + # @return [Hash] Object representation of value + # @raise [RDF::ReaderError] if the iri cannot be expanded + # @see https://www.w3.org/TR/json-ld11-api/#value-expansion + def expand_value(property, value, useNativeTypes: false, rdfDirection: nil, base: nil, **_options) + td = term_definitions.fetch(property, TermDefinition.new(property)) + + # If the active property has a type mapping in active context that is @id, return a new JSON object containing a single key-value pair where the key is @id and the value is the result of using the IRI Expansion algorithm, passing active context, value, and true for document relative. + if value.is_a?(String) && td.type_mapping == '@id' + # log_debug("") {"as relative IRI: #{value.inspect}"} + return { '@id' => expand_iri(value, documentRelative: true, base: base).to_s } + end - result = case value - when RDF::URI, RDF::Node - {'@id' => value.to_s} - when Date, DateTime, Time - lit = RDF::Literal.new(value) - {'@value' => lit.to_s, '@type' => lit.datatype.to_s} - else - # Otherwise, initialize result to a JSON object with an @value member whose value is set to value. 
- res = {} - - if td.type_mapping && !CONTAINERS_ID_VOCAB.include?(td.type_mapping.to_s) - res['@type'] = td.type_mapping.to_s - elsif value.is_a?(String) - language = language(property) - direction = direction(property) - res['@language'] = language if language - res['@direction'] = direction if direction - end - - res.merge('@value' => value) - end + # If active property has a type mapping in active context that is @vocab, return a new JSON object containing a single key-value pair where the key is @id and the value is the result of using the IRI Expansion algorithm, passing active context, value, true for vocab, and true for document relative. + if value.is_a?(String) && td.type_mapping == '@vocab' + return { '@id' => expand_iri(value, vocab: true, documentRelative: true, base: base).to_s } + end - result - end + case value + when RDF::URI, RDF::Node + { '@id' => value.to_s } + when Date, DateTime, Time + lit = RDF::Literal.new(value) + { '@value' => lit.to_s, '@type' => lit.datatype.to_s } + else + # Otherwise, initialize result to a JSON object with an @value member whose value is set to value. + res = {} + + if td.type_mapping && !CONTAINERS_ID_VOCAB.include?(td.type_mapping.to_s) + res['@type'] = td.type_mapping.to_s + elsif value.is_a?(String) + language = language(property) + direction = direction(property) + res['@language'] = language if language + res['@direction'] = direction if direction + end - ## - # Compact a value - # - # @param [String] property - # Associated property used to find coercion rules - # @param [Hash] value - # Value (literal or IRI), in full object representation, to be compacted - # @param [String, RDF::URI] base for resolving document-relative IRIs - # - # @return [Hash] Object representation of value - # @raise [JsonLdError] if the iri cannot be expanded - # @see https://www.w3.org/TR/json-ld11-api/#value-compaction - # FIXME: revisit the specification version of this. 
- def compact_value(property, value, base: nil) - # log_debug("compact_value") {"property: #{property.inspect}, value: #{value.inspect}"} - - indexing = index?(value) && container(property).include?('@index') - language = language(property) - direction = direction(property) - - result = case - when coerce(property) == '@id' && value.key?('@id') && (value.keys - %w(@id @index)).empty? - # Compact an @id coercion - # log_debug("") {" (@id & coerce)"} - compact_iri(value['@id'], base: base) - when coerce(property) == '@vocab' && value.key?('@id') && (value.keys - %w(@id @index)).empty? - # Compact an @id coercion - # log_debug("") {" (@id & coerce & vocab)"} - compact_iri(value['@id'], vocab: true) - when value.key?('@id') - # log_debug("") {" (@id)"} - # return value as is - value - when value['@type'] && value['@type'] == coerce(property) - # Compact common datatype - # log_debug("") {" (@type & coerce) == #{coerce(property)}"} - value['@value'] - when coerce(property) == '@none' || value['@type'] - # use original expanded value - value - when !value['@value'].is_a?(String) - # log_debug("") {" (native)"} - indexing || !index?(value) ? value['@value'] : value - when value['@language'].to_s.downcase == language.to_s.downcase && value['@direction'] == direction - # Compact language and direction - indexing || !index?(value) ? 
value['@value'] : value - else - value + res.merge('@value' => value) + end end - if result.is_a?(Hash) && result.key?('@type') && value['@type'] != '@json' - # Compact values of @type - c_type = if result['@type'].is_a?(Array) - result['@type'].map {|t| compact_iri(t, vocab: true)} + ## + # Compact a value + # + # @param [String] property + # Associated property used to find coercion rules + # @param [Hash] value + # Value (literal or IRI), in full object representation, to be compacted + # @param [String, RDF::URI] base for resolving document-relative IRIs + # + # @return [Hash] Object representation of value + # @raise [JsonLdError] if the iri cannot be expanded + # @see https://www.w3.org/TR/json-ld11-api/#value-compaction + # FIXME: revisit the specification version of this. + def compact_value(property, value, base: nil) + # log_debug("compact_value") {"property: #{property.inspect}, value: #{value.inspect}"} + + indexing = index?(value) && container(property).include?('@index') + language = language(property) + direction = direction(property) + + result = if coerce(property) == '@id' && value.key?('@id') && (value.keys - %w[@id @index]).empty? + # Compact an @id coercion + # log_debug("") {" (@id & coerce)"} + compact_iri(value['@id'], base: base) + elsif coerce(property) == '@vocab' && value.key?('@id') && (value.keys - %w[@id @index]).empty? + # Compact an @id coercion + # log_debug("") {" (@id & coerce & vocab)"} + compact_iri(value['@id'], vocab: true) + elsif value.key?('@id') + # log_debug("") {" (@id)"} + # return value as is + value + elsif value['@type'] && value['@type'] == coerce(property) + # Compact common datatype + # log_debug("") {" (@type & coerce) == #{coerce(property)}"} + value['@value'] + elsif coerce(property) == '@none' || value['@type'] + # use original expanded value + value + elsif !value['@value'].is_a?(String) + # log_debug("") {" (native)"} + indexing || !index?(value) ? 
value['@value'] : value + elsif value['@language'].to_s.casecmp(language.to_s).zero? && value['@direction'] == direction + # Compact language and direction + indexing || !index?(value) ? value['@value'] : value else - compact_iri(result['@type'], vocab: true) + value end - result = result.merge('@type' => c_type) - end - - # If the result is an object, tranform keys using any term keyword aliases - if result.is_a?(Hash) && result.keys.any? {|k| self.alias(k) != k} - # log_debug("") {" (map to key aliases)"} - new_element = {} - result.each do |k, v| - new_element[self.alias(k)] = v - end - result = new_element - end - # log_debug("") {"=> #{result.inspect}"} - result - end + if result.is_a?(Hash) && result.key?('@type') && value['@type'] != '@json' + # Compact values of @type + c_type = if result['@type'].is_a?(Array) + result['@type'].map { |t| compact_iri(t, vocab: true) } + else + compact_iri(result['@type'], vocab: true) + end + result = result.merge('@type' => c_type) + end + + # If the result is an object, tranform keys using any term keyword aliases + if result.is_a?(Hash) && result.keys.any? { |k| self.alias(k) != k } + # log_debug("") {" (map to key aliases)"} + new_element = {} + result.each do |k, v| + new_element[self.alias(k)] = v + end + result = new_element + end - ## - # Turn this into a source for a new instantiation - # @param [Array] aliases - # Other URLs to alias when preloading - # @return [String] - def to_rb(*aliases) - canon_base = RDF::URI(context_base).canonicalize - defn = [] - - defn << "base: #{self.base.to_s.inspect}" if self.base - defn << "language: #{self.default_language.inspect}" if self.default_language - defn << "vocab: #{self.vocab.to_s.inspect}" if self.vocab - defn << "processingMode: #{self.processingMode.inspect}" if self.processingMode - term_defs = term_definitions.map do |term, td| - " " + term.inspect + " => " + td.to_rb - end.sort - defn << "term_definitions: {\n#{term_defs.join(",\n") }\n }" unless term_defs.empty? 
- %(# -*- encoding: utf-8 -*- + # log_debug("") {"=> #{result.inspect}"} + result + end + + ## + # Turn this into a source for a new instantiation + # @param [Array] aliases + # Other URLs to alias when preloading + # @return [String] + def to_rb(*aliases) + canon_base = RDF::URI(context_base).canonicalize + defn = [] + + defn << "base: #{base.to_s.inspect}" if base + defn << "language: #{default_language.inspect}" if default_language + defn << "vocab: #{vocab.to_s.inspect}" if vocab + defn << "processingMode: #{processingMode.inspect}" if processingMode + term_defs = term_definitions.map do |term, td| + " " + term.inspect + " => " + td.to_rb + end.sort + defn << "term_definitions: {\n#{term_defs.join(",\n")}\n }" unless term_defs.empty? + %(# -*- encoding: utf-8 -*- # frozen_string_literal: true # This file generated automatically from #{context_base} require 'json/ld' class JSON::LD::Context ).gsub(/^ /, '') + - %[ add_preloaded("#{canon_base}") do\n new(] + defn.join(", ") + ")\n end\n" + - aliases.map {|a| %[ alias_preloaded("#{a}", "#{canon_base}")\n]}.join("") + - "end\n" - end + %[ add_preloaded("#{canon_base}") do\n new(] + defn.join(", ") + ")\n end\n" + + aliases.map { |a| %[ alias_preloaded("#{a}", "#{canon_base}")\n] }.join + + "end\n" + end - def inspect - v = %w([Context) - v << "base=#{base}" if base - v << "vocab=#{vocab}" if vocab - v << "processingMode=#{processingMode}" if processingMode - v << "default_language=#{default_language}" if default_language - v << "default_direction=#{default_direction}" if default_direction - v << "previous_context" if previous_context - v << "term_definitions[#{term_definitions.length}]=#{term_definitions}" - v.join(" ") + "]" - end + def inspect + v = %w([Context) + v << "base=#{base}" if base + v << "vocab=#{vocab}" if vocab + v << "processingMode=#{processingMode}" if processingMode + v << "default_language=#{default_language}" if default_language + v << "default_direction=#{default_direction}" if 
default_direction + v << "previous_context" if previous_context + v << "term_definitions[#{term_definitions.length}]=#{term_definitions}" + v.join(" ") + "]" + end - # Duplicate an active context, allowing it to be modified. - def dup - that = self - ec = Context.new(unfrozen: true, **@options) - ec.context_base = that.context_base - ec.base = that.base unless that.base.nil? - ec.default_direction = that.default_direction - ec.default_language = that.default_language - ec.previous_context = that.previous_context - ec.processingMode = that.processingMode if that.instance_variable_get(:@processingMode) - ec.vocab = that.vocab if that.vocab - - ec.instance_eval do - @term_definitions = that.term_definitions.dup - @iri_to_term = that.iri_to_term + # Duplicate an active context, allowing it to be modified. + def dup + that = self + ec = Context.new(unfrozen: true, **@options) + ec.context_base = that.context_base + ec.base = that.base unless that.base.nil? + ec.default_direction = that.default_direction + ec.default_language = that.default_language + ec.previous_context = that.previous_context + ec.processingMode = that.processingMode if that.instance_variable_get(:@processingMode) + ec.vocab = that.vocab if that.vocab + + ec.instance_eval do + @term_definitions = that.term_definitions.dup + @iri_to_term = that.iri_to_term + end + ec end - ec - end - protected + protected - ## - # Determine if `term` is a suitable term. - # Term may be any valid JSON string. - # - # @param [String] term - # @return [Boolean] - def term_valid?(term) - term.is_a?(String) && !term.empty? - end + ## + # Determine if `term` is a suitable term. + # Term may be any valid JSON string. + # + # @param [String] term + # @return [Boolean] + def term_valid?(term) + term.is_a?(String) && !term.empty? + end - # Reverse term mapping, typically used for finding aliases for keys. - # - # Returns either the original value, or a mapping for this value. 
- # - # @example - # {"@context": {"id": "@id"}, "@id": "foo"} => {"id": "foo"} - # - # @param [RDF::URI, String] value - # @return [String] - def alias(value) - iri_to_term.fetch(value, value) - end + # Reverse term mapping, typically used for finding aliases for keys. + # + # Returns either the original value, or a mapping for this value. + # + # @example + # {"@context": {"id": "@id"}, "@id": "foo"} => {"id": "foo"} + # + # @param [RDF::URI, String] value + # @return [String] + def alias(value) + iri_to_term.fetch(value, value) + end - private - - CONTEXT_CONTAINER_ARRAY_TERMS = Set.new(%w(@set @list @graph)).freeze - CONTEXT_CONTAINER_ID_GRAPH = Set.new(%w(@id @graph)).freeze - CONTEXT_CONTAINER_INDEX_GRAPH = Set.new(%w(@index @graph)).freeze - CONTEXT_BASE_FRAG_OR_QUERY = %w(? #).freeze - CONTEXT_TYPE_ID_VOCAB = %w(@id @vocab).freeze - - ## - # Reads the `@context` from an IO - def load_context(io, **options) - io.rewind - remote_doc = API.loadRemoteDocument(io, **options) - remote_doc.document.is_a?(String) ? - MultiJson.load(remote_doc.document) : - remote_doc.document - end + private - def uri(value) - case value.to_s - when /^_:(.*)$/ - # Map BlankNodes if a namer is given - # log_debug "uri(bnode)#{value}: #{$1}" - bnode(namer.get_sym($1)) - else - value = RDF::URI(value) - #value.validate! if options[:validate] - value + CONTEXT_CONTAINER_ARRAY_TERMS = Set.new(%w[@set @list @graph]).freeze + CONTEXT_CONTAINER_ID_GRAPH = Set.new(%w[@id @graph]).freeze + CONTEXT_CONTAINER_INDEX_GRAPH = Set.new(%w[@index @graph]).freeze + CONTEXT_BASE_FRAG_OR_QUERY = %w[? 
#].freeze + CONTEXT_TYPE_ID_VOCAB = %w[@id @vocab].freeze + + ## + # Reads the `@context` from an IO + def load_context(io, **options) + io.rewind + remote_doc = API.loadRemoteDocument(io, **options) + if remote_doc.document.is_a?(String) + MultiJson.load(remote_doc.document) + else + remote_doc.document + end end - end - # Keep track of allocated BNodes - # - # Don't actually use the name provided, to prevent name alias issues. - # @return [RDF::Node] - def bnode(value = nil) - @@bnode_cache ||= {} - @@bnode_cache[value.to_s] ||= RDF::Node.new(value) - end + def uri(value) + case value.to_s + when /^_:(.*)$/ + # Map BlankNodes if a namer is given + # log_debug "uri(bnode)#{value}: #{$1}" + bnode(namer.get_sym(::Regexp.last_match(1))) + else + RDF::URI(value) + # value.validate! if options[:validate] + + end + end + + # Keep track of allocated BNodes + # + # Don't actually use the name provided, to prevent name alias issues. + # @return [RDF::Node] + def bnode(value = nil) + @@bnode_cache ||= {} + @@bnode_cache[value.to_s] ||= RDF::Node.new(value) + end + + ## + # Inverse Context creation + # + # When there is more than one term that could be chosen to compact an IRI, it has to be ensured that the term selection is both deterministic and represents the most context-appropriate choice whilst taking into consideration algorithmic complexity. + # + # In order to make term selections, the concept of an inverse context is introduced. An inverse context is essentially a reverse lookup table that maps container mappings, type mappings, and language mappings to a simple term for a given active context. A inverse context only needs to be generated for an active context if it is being used for compaction. + # + # To make use of an inverse context, a list of preferred container mappings and the type mapping or language mapping are gathered for a particular value associated with an IRI. 
These parameters are then fed to the Term Selection algorithm, which will find the term that most appropriately matches the value's mappings. + # + # @example Basic structure of resulting inverse context + # { + # "http://example.com/term": { + # "@language": { + # "@null": "term", + # "@none": "term", + # "en": "term", + # "ar_rtl": "term" + # }, + # "@type": { + # "@reverse": "term", + # "@none": "term", + # "http://datatype": "term" + # }, + # "@any": { + # "@none": "term", + # } + # } + # } + # @return [Hash{String => Hash{String => String}}] + # @todo May want to include @set along with container to allow selecting terms using @set over those without @set. May require adding some notion of value cardinality to compact_iri + def inverse_context + Context.inverse_cache[hash] ||= begin + result = {} + default_language = (self.default_language || '@none').downcase + term_definitions.keys.sort do |a, b| + a.length == b.length ? (a <=> b) : (a.length <=> b.length) + end.each do |term| + next unless (td = term_definitions[term]) + + container = td.container_mapping.to_a.join + if container.empty? + container = td.as_set? ? %(@set) : %(@none) + end - ## - # Inverse Context creation - # - # When there is more than one term that could be chosen to compact an IRI, it has to be ensured that the term selection is both deterministic and represents the most context-appropriate choice whilst taking into consideration algorithmic complexity. - # - # In order to make term selections, the concept of an inverse context is introduced. An inverse context is essentially a reverse lookup table that maps container mappings, type mappings, and language mappings to a simple term for a given active context. A inverse context only needs to be generated for an active context if it is being used for compaction. - # - # To make use of an inverse context, a list of preferred container mappings and the type mapping or language mapping are gathered for a particular value associated with an IRI. 
These parameters are then fed to the Term Selection algorithm, which will find the term that most appropriately matches the value's mappings. - # - # @example Basic structure of resulting inverse context - # { - # "http://example.com/term": { - # "@language": { - # "@null": "term", - # "@none": "term", - # "en": "term", - # "ar_rtl": "term" - # }, - # "@type": { - # "@reverse": "term", - # "@none": "term", - # "http://datatype": "term" - # }, - # "@any": { - # "@none": "term", - # } - # } - # } - # @return [Hash{String => Hash{String => String}}] - # @todo May want to include @set along with container to allow selecting terms using @set over those without @set. May require adding some notion of value cardinality to compact_iri - def inverse_context - Context.inverse_cache[self.hash] ||= begin - result = {} - default_language = (self.default_language || '@none').downcase - term_definitions.keys.sort do |a, b| - a.length == b.length ? (a <=> b) : (a.length <=> b.length) - end.each do |term| - next unless td = term_definitions[term] - - container = td.container_mapping.to_a.join('') - if container.empty? - container = td.as_set? ? %(@set) : %(@none) - end - - container_map = result[td.id.to_s] ||= {} - tl_map = container_map[container] ||= {'@language' => {}, '@type' => {}, '@any' => {}} - type_map = tl_map['@type'] - language_map = tl_map['@language'] - any_map = tl_map['@any'] - any_map['@none'] ||= term - if td.reverse_property - type_map['@reverse'] ||= term - elsif td.type_mapping == '@none' - type_map['@any'] ||= term - language_map['@any'] ||= term - any_map['@any'] ||= term - elsif td.type_mapping - type_map[td.type_mapping.to_s] ||= term - elsif !td.language_mapping.nil? && !td.direction_mapping.nil? 
- lang_dir = if td.language_mapping && td.direction_mapping - "#{td.language_mapping}_#{td.direction_mapping}".downcase - elsif td.language_mapping - td.language_mapping.downcase - elsif td.direction_mapping - "_#{td.direction_mapping}" + container_map = result[td.id.to_s] ||= {} + tl_map = container_map[container] ||= { '@language' => {}, '@type' => {}, '@any' => {} } + type_map = tl_map['@type'] + language_map = tl_map['@language'] + any_map = tl_map['@any'] + any_map['@none'] ||= term + if td.reverse_property + type_map['@reverse'] ||= term + elsif td.type_mapping == '@none' + type_map['@any'] ||= term + language_map['@any'] ||= term + any_map['@any'] ||= term + elsif td.type_mapping + type_map[td.type_mapping.to_s] ||= term + elsif !td.language_mapping.nil? && !td.direction_mapping.nil? + lang_dir = if td.language_mapping && td.direction_mapping + "#{td.language_mapping}_#{td.direction_mapping}".downcase + elsif td.language_mapping + td.language_mapping.downcase + elsif td.direction_mapping + "_#{td.direction_mapping}" + else + "@null" + end + language_map[lang_dir] ||= term + elsif !td.language_mapping.nil? + lang_dir = (td.language_mapping || '@null').downcase + language_map[lang_dir] ||= term + elsif !td.direction_mapping.nil? + lang_dir = td.direction_mapping ? "_#{td.direction_mapping}" : '@none' + language_map[lang_dir] ||= term + elsif default_direction + language_map["_#{default_direction}"] ||= term + language_map['@none'] ||= term + type_map['@none'] ||= term else - "@null" + language_map[default_language] ||= term + language_map['@none'] ||= term + type_map['@none'] ||= term end - language_map[lang_dir] ||= term - elsif !td.language_mapping.nil? - lang_dir = (td.language_mapping || '@null').downcase - language_map[lang_dir] ||= term - elsif !td.direction_mapping.nil? - lang_dir = td.direction_mapping ? 
"_#{td.direction_mapping}" : '@none' - language_map[lang_dir] ||= term - elsif default_direction - language_map["_#{default_direction}"] ||= term - language_map['@none'] ||= term - type_map['@none'] ||= term - else - language_map[default_language] ||= term - language_map['@none'] ||= term - type_map['@none'] ||= term end + result end - result end - end - ## - # This algorithm, invoked via the IRI Compaction algorithm, makes use of an active context's inverse context to find the term that is best used to compact an IRI. Other information about a value associated with the IRI is given, including which container mappings and which type mapping or language mapping would be best used to express the value. - # - # @param [String] iri - # @param [Array] containers - # represents an ordered list of preferred container mappings - # @param [String] type_language - # indicates whether to look for a term with a matching type mapping or language mapping - # @param [Array] preferred_values - # for the type mapping or language mapping - # @return [String] - def select_term(iri, containers, type_language, preferred_values) - # log_debug("select_term") { - # "iri: #{iri.inspect}, " + - # "containers: #{containers.inspect}, " + - # "type_language: #{type_language.inspect}, " + - # "preferred_values: #{preferred_values.inspect}" - #} - container_map = inverse_context[iri] - # log_debug(" ") {"container_map: #{container_map.inspect}"} - containers.each do |container| - next unless container_map.key?(container) - tl_map = container_map[container] - value_map = tl_map[type_language] - preferred_values.each do |item| - next unless value_map.key?(item) - # log_debug("=>") {value_map[item].inspect} - return value_map[item] + ## + # This algorithm, invoked via the IRI Compaction algorithm, makes use of an active context's inverse context to find the term that is best used to compact an IRI. 
Other information about a value associated with the IRI is given, including which container mappings and which type mapping or language mapping would be best used to express the value. + # + # @param [String] iri + # @param [Array] containers + # represents an ordered list of preferred container mappings + # @param [String] type_language + # indicates whether to look for a term with a matching type mapping or language mapping + # @param [Array] preferred_values + # for the type mapping or language mapping + # @return [String] + def select_term(iri, containers, type_language, preferred_values) + # log_debug("select_term") { + # "iri: #{iri.inspect}, " + + # "containers: #{containers.inspect}, " + + # "type_language: #{type_language.inspect}, " + + # "preferred_values: #{preferred_values.inspect}" + # } + container_map = inverse_context[iri] + # log_debug(" ") {"container_map: #{container_map.inspect}"} + containers.each do |container| + next unless container_map.key?(container) + + tl_map = container_map[container] + value_map = tl_map[type_language] + preferred_values.each do |item| + next unless value_map.key?(item) + + # log_debug("=>") {value_map[item].inspect} + return value_map[item] + end end + # log_debug("=>") {"nil"} + nil end - # log_debug("=>") {"nil"} - nil - end - ## - # Removes a base IRI from the given absolute IRI. - # - # @param [String] base the base used for making `iri` relative - # @param [String] iri the absolute IRI - # @return [String] - # the relative IRI if relative to base, otherwise the absolute IRI. - def remove_base(base, iri) - return iri unless base - @base_and_parents ||= begin - u = base - iri_set = u.to_s.end_with?('/') ? [u.to_s] : [] - iri_set << u.to_s while (u != './' && u = u.parent) - iri_set - end - b = base.to_s - return iri[b.length..-1] if iri.start_with?(b) && CONTEXT_BASE_FRAG_OR_QUERY.include?(iri[b.length, 1]) + ## + # Removes a base IRI from the given absolute IRI. 
+ # + # @param [String] base the base used for making `iri` relative + # @param [String] iri the absolute IRI + # @return [String] + # the relative IRI if relative to base, otherwise the absolute IRI. + def remove_base(base, iri) + return iri unless base + + @base_and_parents ||= begin + u = base + iri_set = u.to_s.end_with?('/') ? [u.to_s] : [] + iri_set << u.to_s while u != './' && (u = u.parent) + iri_set + end + b = base.to_s + return iri[b.length..] if iri.start_with?(b) && CONTEXT_BASE_FRAG_OR_QUERY.include?(iri[b.length, 1]) - @base_and_parents.each_with_index do |bb, index| - next unless iri.start_with?(bb) - rel = "../" * index + iri[bb.length..-1] - return rel.empty? ? "./" : rel + @base_and_parents.each_with_index do |bb, index| + next unless iri.start_with?(bb) + + rel = ("../" * index) + iri[bb.length..] + return rel.empty? ? "./" : rel + end + iri end - iri - end - ## Used for testing - # Retrieve term mappings - # - # @return [Array] - def mappings - {}.tap do |memo| - term_definitions.each_pair do |t,td| - memo[t] = td ? td.id : nil + ## Used for testing + # Retrieve term mappings + # + # @return [Array] + def mappings + {}.tap do |memo| + term_definitions.each_pair do |t, td| + memo[t] = td ? td.id : nil + end end end - end - ## Used for testing - # Retrieve term mapping - # - # @param [String, #to_s] term - # - # @return [RDF::URI, String] - def mapping(term) - term_definitions[term] ? 
term_definitions[term].id : nil - end + ## Used for testing + # Retrieve term mapping + # + # @param [String, #to_s] term + # + # @return [RDF::URI, String] + def mapping(term) + term_definitions[term]&.id + end - ## Used for testing - # Retrieve language mappings - # - # @return [Array] - # @deprecated - def languages - {}.tap do |memo| - term_definitions.each_pair do |t,td| - memo[t] = td.language_mapping + ## Used for testing + # Retrieve language mappings + # + # @return [Array] + # @deprecated + def languages + {}.tap do |memo| + term_definitions.each_pair do |t, td| + memo[t] = td.language_mapping + end end end - end - # Ensure @container mapping is appropriate - # The result is the original container definition. For IRI containers, this is necessary to be able to determine the @type mapping for string values - def check_container(container, local_context, defined, term) - if container.is_a?(Array) && processingMode('json-ld-1.0') - raise JsonLdError::InvalidContainerMapping, - "'@container' on term #{term.inspect} must be a string: #{container.inspect}" - end + # Ensure @container mapping is appropriate + # The result is the original container definition. 
For IRI containers, this is necessary to be able to determine the @type mapping for string values + def check_container(container, _local_context, _defined, term) + if container.is_a?(Array) && processingMode('json-ld-1.0') + raise JsonLdError::InvalidContainerMapping, + "'@container' on term #{term.inspect} must be a string: #{container.inspect}" + end - val = Set.new(Array(container)) - val.delete('@set') if has_set = val.include?('@set') - - if val.include?('@list') - raise JsonLdError::InvalidContainerMapping, - "'@container' on term #{term.inspect} using @list cannot have any other values" unless - !has_set && val.length == 1 - # Okay - elsif val.include?('@language') - raise JsonLdError::InvalidContainerMapping, - "unknown mapping for '@container' to #{container.inspect} on term #{term.inspect}" if - has_set && processingMode('json-ld-1.0') - raise JsonLdError::InvalidContainerMapping, - "'@container' on term #{term.inspect} using @language cannot have any values other than @set, found #{container.inspect}" unless - val.length == 1 - # Okay - elsif val.include?('@index') - raise JsonLdError::InvalidContainerMapping, - "unknown mapping for '@container' to #{container.inspect} on term #{term.inspect}" if - has_set && processingMode('json-ld-1.0') - raise JsonLdError::InvalidContainerMapping, - "'@container' on term #{term.inspect} using @index cannot have any values other than @set and/or @graph, found #{container.inspect}" unless - (val - CONTEXT_CONTAINER_INDEX_GRAPH).empty? 
- # Okay - elsif val.include?('@id') - raise JsonLdError::InvalidContainerMapping, - "unknown mapping for '@container' to #{container.inspect} on term #{term.inspect}" if - processingMode('json-ld-1.0') - raise JsonLdError::InvalidContainerMapping, - "'@container' on term #{term.inspect} using @id cannot have any values other than @set and/or @graph, found #{container.inspect}" unless - val.subset?(CONTEXT_CONTAINER_ID_GRAPH) - # Okay - elsif val.include?('@type') || val.include?('@graph') - raise JsonLdError::InvalidContainerMapping, - "unknown mapping for '@container' to #{container.inspect} on term #{term.inspect}" if - processingMode('json-ld-1.0') - raise JsonLdError::InvalidContainerMapping, - "'@container' on term #{term.inspect} using @language cannot have any values other than @set, found #{container.inspect}" unless - val.length == 1 - # Okay - elsif val.empty? - # Okay - else - raise JsonLdError::InvalidContainerMapping, + val = Set.new(Array(container)) + val.delete('@set') if (has_set = val.include?('@set')) + + if val.include?('@list') + unless !has_set && val.length == 1 + raise JsonLdError::InvalidContainerMapping, + "'@container' on term #{term.inspect} using @list cannot have any other values" + end + # Okay + elsif val.include?('@language') + if has_set && processingMode('json-ld-1.0') + raise JsonLdError::InvalidContainerMapping, + "unknown mapping for '@container' to #{container.inspect} on term #{term.inspect}" + end + unless val.length == 1 + raise JsonLdError::InvalidContainerMapping, + "'@container' on term #{term.inspect} using @language cannot have any values other than @set, found #{container.inspect}" + end + # Okay + elsif val.include?('@index') + if has_set && processingMode('json-ld-1.0') + raise JsonLdError::InvalidContainerMapping, + "unknown mapping for '@container' to #{container.inspect} on term #{term.inspect}" + end + unless (val - CONTEXT_CONTAINER_INDEX_GRAPH).empty? 
+ raise JsonLdError::InvalidContainerMapping, + "'@container' on term #{term.inspect} using @index cannot have any values other than @set and/or @graph, found #{container.inspect}" + end + # Okay + elsif val.include?('@id') + if processingMode('json-ld-1.0') + raise JsonLdError::InvalidContainerMapping, "unknown mapping for '@container' to #{container.inspect} on term #{term.inspect}" + end + unless val.subset?(CONTEXT_CONTAINER_ID_GRAPH) + raise JsonLdError::InvalidContainerMapping, + "'@container' on term #{term.inspect} using @id cannot have any values other than @set and/or @graph, found #{container.inspect}" + end + # Okay + elsif val.include?('@type') || val.include?('@graph') + if processingMode('json-ld-1.0') + raise JsonLdError::InvalidContainerMapping, + "unknown mapping for '@container' to #{container.inspect} on term #{term.inspect}" + end + unless val.length == 1 + raise JsonLdError::InvalidContainerMapping, + "'@container' on term #{term.inspect} using @language cannot have any values other than @set, found #{container.inspect}" + end + # Okay + elsif val.empty? 
+ # Okay + else + raise JsonLdError::InvalidContainerMapping, + "unknown mapping for '@container' to #{container.inspect} on term #{term.inspect}" + end + Array(container) end - Array(container) - end - # Term Definitions specify how properties and values have to be interpreted as well as the current vocabulary mapping and the default language - class TermDefinition - # @return [RDF::URI] IRI map - attr_accessor :id + # Term Definitions specify how properties and values have to be interpreted as well as the current vocabulary mapping and the default language + class TermDefinition + # @return [RDF::URI] IRI map + attr_accessor :id - # @return [String] term name - attr_accessor :term + # @return [String] term name + attr_accessor :term - # @return [String] Type mapping - attr_accessor :type_mapping + # @return [String] Type mapping + attr_accessor :type_mapping - # Base container mapping, without @set - # @return [Array<'@index', '@language', '@index', '@set', '@type', '@id', '@graph'>] Container mapping - attr_reader :container_mapping + # Base container mapping, without @set + # @return [Array<'@index', '@language', '@index', '@set', '@type', '@id', '@graph'>] Container mapping + attr_reader :container_mapping - # @return [String] Term used for nest properties - attr_accessor :nest + # @return [String] Term used for nest properties + attr_accessor :nest - # Language mapping of term, `false` is used if there is an explicit language mapping for this term. - # @return [String] Language mapping - attr_accessor :language_mapping + # Language mapping of term, `false` is used if there is an explicit language mapping for this term. + # @return [String] Language mapping + attr_accessor :language_mapping - # Direction of term, `false` is used if there is explicit direction mapping mapping for this term. 
- # @return ["ltr", "rtl"] direction_mapping - attr_accessor :direction_mapping + # Direction of term, `false` is used if there is explicit direction mapping mapping for this term. + # @return ["ltr", "rtl"] direction_mapping + attr_accessor :direction_mapping - # @return [Boolean] Reverse Property - attr_accessor :reverse_property + # @return [Boolean] Reverse Property + attr_accessor :reverse_property - # This is a simple term definition, not an expanded term definition - # @return [Boolean] - attr_accessor :simple + # This is a simple term definition, not an expanded term definition + # @return [Boolean] + attr_accessor :simple - # Property used for data indexing; defaults to @index - # @return [Boolean] - attr_accessor :index + # Property used for data indexing; defaults to @index + # @return [Boolean] + attr_accessor :index - # Indicate that term may be used as a prefix - attr_writer :prefix + # Indicate that term may be used as a prefix + attr_writer :prefix - # Term-specific context - # @return [Hash{String => Object}] - attr_accessor :context + # Term-specific context + # @return [Hash{String => Object}] + attr_accessor :context - # Term is protected. - # @return [Boolean] - attr_writer :protected + # Term is protected. + # @return [Boolean] + attr_writer :protected - # This is a simple term definition, not an expanded term definition - # @return [Boolean] simple - def simple?; simple; end + # This is a simple term definition, not an expanded term definition + # @return [Boolean] simple + def simple? + simple + end - # This is an appropriate term to use as the prefix of a compact IRI - # @return [Boolean] simple - def prefix?; @prefix; end + # This is an appropriate term to use as the prefix of a compact IRI + # @return [Boolean] simple + def prefix? 
+ @prefix + end - # Create a new Term Mapping with an ID - # @param [String] term - # @param [String] id - # @param [String] type_mapping Type mapping - # @param [Set<'@index', '@language', '@index', '@set', '@type', '@id', '@graph'>] container_mapping - # @param [String] language_mapping - # Language mapping of term, `false` is used if there is an explicit language mapping for this term - # @param ["ltr", "rtl"] direction_mapping - # Direction mapping of term, `false` is used if there is an explicit direction mapping for this term - # @param [Boolean] reverse_property - # @param [Boolean] protected mark resulting context as protected - # @param [String] nest term used for nest properties - # @param [Boolean] simple - # This is a simple term definition, not an expanded term definition - # @param [Boolean] prefix - # Term may be used as a prefix - def initialize(term, - id: nil, - index: nil, - type_mapping: nil, - container_mapping: nil, - language_mapping: nil, - direction_mapping: nil, - reverse_property: false, - nest: nil, - protected: nil, - simple: false, - prefix: nil, - context: nil) - @term = term - @id = id.to_s unless id.nil? - @index = index.to_s unless index.nil? - @type_mapping = type_mapping.to_s unless type_mapping.nil? - self.container_mapping = container_mapping - @language_mapping = language_mapping unless language_mapping.nil? - @direction_mapping = direction_mapping unless direction_mapping.nil? - @reverse_property = reverse_property - @protected = protected - @nest = nest unless nest.nil? - @simple = simple - @prefix = prefix unless prefix.nil? - @context = context unless context.nil? 
- end + # Create a new Term Mapping with an ID + # @param [String] term + # @param [String] id + # @param [String] type_mapping Type mapping + # @param [Set<'@index', '@language', '@index', '@set', '@type', '@id', '@graph'>] container_mapping + # @param [String] language_mapping + # Language mapping of term, `false` is used if there is an explicit language mapping for this term + # @param ["ltr", "rtl"] direction_mapping + # Direction mapping of term, `false` is used if there is an explicit direction mapping for this term + # @param [Boolean] reverse_property + # @param [Boolean] protected mark resulting context as protected + # @param [String] nest term used for nest properties + # @param [Boolean] simple + # This is a simple term definition, not an expanded term definition + # @param [Boolean] prefix + # Term may be used as a prefix + def initialize(term, + id: nil, + index: nil, + type_mapping: nil, + container_mapping: nil, + language_mapping: nil, + direction_mapping: nil, + reverse_property: false, + nest: nil, + protected: nil, + simple: false, + prefix: nil, + context: nil) + @term = term + @id = id.to_s unless id.nil? + @index = index.to_s unless index.nil? + @type_mapping = type_mapping.to_s unless type_mapping.nil? + self.container_mapping = container_mapping + @language_mapping = language_mapping unless language_mapping.nil? + @direction_mapping = direction_mapping unless direction_mapping.nil? + @reverse_property = reverse_property + @protected = protected + @nest = nest unless nest.nil? + @simple = simple + @prefix = prefix unless prefix.nil? + @context = context unless context.nil? + end - # Term is protected. 
- # @return [Boolean] - def protected?; !!@protected; end - - # Set container mapping, from an array which may include @set - def container_mapping=(mapping) - mapping = case mapping - when Set then mapping - when Array then Set.new(mapping) - when String then Set[mapping] - when nil then Set.new - else - raise "Shouldn't happen with #{mapping.inspect}" + # Term is protected. + # @return [Boolean] + def protected? + !!@protected end - if @as_set = mapping.include?('@set') - mapping = mapping.dup - mapping.delete('@set') + + # Returns true if the term matches an IRI + # + # @param iri [String] the IRI + # @return [Boolean] + def match_iri?(iri) + iri.start_with?(id) end - @container_mapping = mapping - @index ||= '@index' if mapping.include?('@index') - end - ## - # Output Hash or String definition for this definition considering @language and @vocab - # - # @param [Context] context - # @return [String, Hash{String => Array[String], String}] - def to_context_definition(context) - cid = if context.vocab && id.start_with?(context.vocab) - # Nothing to return unless it's the same as the vocab - id == context.vocab ? context.vocab : id.to_s[context.vocab.length..-1] - else - # Find a term to act as a prefix - iri, prefix = context.iri_to_term.detect {|i,p| id.to_s.start_with?(i.to_s)} - iri && iri != id ? "#{prefix}:#{id.to_s[iri.length..-1]}" : id + # Returns true if the term matches a compact IRI + # + # @param iri [String] the compact IRI + # @return [Boolean] + def match_compact_iri?(iri) + iri.start_with?(prefix_colon) end - if simple? - cid.to_s unless cid == term && context.vocab - else - defn = {} - defn[reverse_property ? 
'@reverse' : '@id'] = cid.to_s unless cid == term && !reverse_property - if type_mapping - defn['@type'] = if KEYWORDS.include?(type_mapping) - type_mapping - else - context.compact_iri(type_mapping, vocab: true) + # Set container mapping, from an array which may include @set + def container_mapping=(mapping) + mapping = case mapping + when Set then mapping + when Array then Set.new(mapping) + when String then Set[mapping] + when nil then Set.new + else + raise "Shouldn't happen with #{mapping.inspect}" + end + if (@as_set = mapping.include?('@set')) + mapping = mapping.dup + mapping.delete('@set') + end + @container_mapping = mapping + @index ||= '@index' if mapping.include?('@index') + end + + ## + # Output Hash or String definition for this definition considering @language and @vocab + # + # @param [Context] context + # @return [String, Hash{String => Array[String], String}] + def to_context_definition(context) + cid = if context.vocab && id.start_with?(context.vocab) + # Nothing to return unless it's the same as the vocab + id == context.vocab ? context.vocab : id.to_s[context.vocab.length..] + else + # Find a term to act as a prefix + iri, prefix = context.iri_to_term.detect { |i, _p| id.to_s.start_with?(i.to_s) } + iri && iri != id ? "#{prefix}:#{id.to_s[iri.length..]}" : id + end + + if simple? + cid.to_s unless cid == term && context.vocab + else + defn = {} + defn[reverse_property ? '@reverse' : '@id'] = cid.to_s unless cid == term && !reverse_property + if type_mapping + defn['@type'] = if KEYWORDS.include?(type_mapping) + type_mapping + else + context.compact_iri(type_mapping, vocab: true) + end + end + + cm = Array(container_mapping) + cm << "@set" if as_set? && !cm.include?("@set") + cm = cm.first if cm.length == 1 + defn['@container'] = cm unless cm.empty? + # Language set as false to be output as null + defn['@language'] = (@language_mapping || nil) unless @language_mapping.nil? 
+ defn['@direction'] = (@direction_mapping || nil) unless @direction_mapping.nil? + defn['@context'] = @context if @context + defn['@nest'] = @nest if @nest + defn['@index'] = @index if @index + defn['@prefix'] = @prefix unless @prefix.nil? + defn + end + end + + ## + # Turn this into a source for a new instantiation + # FIXME: context serialization + # @return [String] + def to_rb + defn = [%(TermDefinition.new\(#{term.inspect})] + %w[id index type_mapping container_mapping language_mapping direction_mapping reverse_property nest simple + prefix context protected].each do |acc| + v = instance_variable_get("@#{acc}".to_sym) + v = v.to_s if v.is_a?(RDF::Term) + if acc == 'container_mapping' + v = v.to_a + v << '@set' if as_set? + v = v.first if v.length <= 1 end + defn << "#{acc}: #{v.inspect}" if v end + defn.join(', ') + ")" + end - cm = Array(container_mapping) - cm << "@set" if as_set? && !cm.include?("@set") - cm = cm.first if cm.length == 1 - defn['@container'] = cm unless cm.empty? - # Language set as false to be output as null - defn['@language'] = (@language_mapping ? @language_mapping : nil) unless @language_mapping.nil? - defn['@direction'] = (@direction_mapping ? @direction_mapping : nil) unless @direction_mapping.nil? - defn['@context'] = @context if @context - defn['@nest'] = @nest if @nest - defn['@index'] = @index if @index - defn['@prefix'] = @prefix unless @prefix.nil? - defn + # If container mapping was defined along with @set + # @return [Boolean] + def as_set? 
+ @as_set || false end - end - ## - # Turn this into a source for a new instantiation - # FIXME: context serialization - # @return [String] - def to_rb - defn = [%(TermDefinition.new\(#{term.inspect})] - %w(id index type_mapping container_mapping language_mapping direction_mapping reverse_property nest simple prefix context protected).each do |acc| - v = instance_variable_get("@#{acc}".to_sym) - v = v.to_s if v.is_a?(RDF::Term) - if acc == 'container_mapping' - v = v.to_a - v << '@set' if as_set? - v = v.first if v.length <= 1 - end - defn << "#{acc}: #{v.inspect}" if v - end - defn.join(', ') + ")" - end + # Check if term definitions are identical, modulo @protected + # @return [Boolean] + def ==(other) + other.is_a?(TermDefinition) && + id == other.id && + term == other.term && + type_mapping == other.type_mapping && + container_mapping == other.container_mapping && + nest == other.nest && + language_mapping == other.language_mapping && + direction_mapping == other.direction_mapping && + reverse_property == other.reverse_property && + simple == other.simple && + index == other.index && + context == other.context && + prefix? == other.prefix? && + as_set? == other.as_set? + end - # If container mapping was defined along with @set - # @return [Boolean] - def as_set?; @as_set || false; end + def inspect + v = %w([TD) + v << "id=#{@id}" + v << "index=#{index.inspect}" unless index.nil? + v << "term=#{@term}" + v << "rev" if reverse_property + v << "container=#{container_mapping}" if container_mapping + v << "as_set=#{as_set?.inspect}" + v << "lang=#{language_mapping.inspect}" unless language_mapping.nil? + v << "dir=#{direction_mapping.inspect}" unless direction_mapping.nil? + v << "type=#{type_mapping}" unless type_mapping.nil? + v << "nest=#{nest.inspect}" unless nest.nil? + v << "simple=true" if @simple + v << "protected=true" if @protected + v << "prefix=#{@prefix.inspect}" unless @prefix.nil? + v << "has-context" unless context.nil? 
+ v.join(" ") + "]" + end - # Check if term definitions are identical, modulo @protected - # @return [Boolean] - def ==(other) - other.is_a?(TermDefinition) && - id == other.id && - term == other.term && - type_mapping == other.type_mapping && - container_mapping == other.container_mapping && - nest == other.nest && - language_mapping == other.language_mapping && - direction_mapping == other.direction_mapping && - reverse_property == other.reverse_property && - simple == other.simple && - index == other.index && - context == other.context && - prefix? == other.prefix? && - as_set? == other.as_set? - end + private - def inspect - v = %w([TD) - v << "id=#{@id}" - v << "index=#{index.inspect}" unless index.nil? - v << "term=#{@term}" - v << "rev" if reverse_property - v << "container=#{container_mapping}" if container_mapping - v << "as_set=#{as_set?.inspect}" - v << "lang=#{language_mapping.inspect}" unless language_mapping.nil? - v << "dir=#{direction_mapping.inspect}" unless direction_mapping.nil? - v << "type=#{type_mapping}" unless type_mapping.nil? - v << "nest=#{nest.inspect}" unless nest.nil? - v << "simple=true" if @simple - v << "protected=true" if @protected - v << "prefix=#{@prefix.inspect}" unless @prefix.nil? - v << "has-context" unless context.nil? 
- v.join(" ") + "]" + def prefix_colon + @prefix_colon ||= "#{term}:".freeze + end end end end diff --git a/lib/json/ld/expand.rb b/lib/json/ld/expand.rb index d041d5ba..73260397 100644 --- a/lib/json/ld/expand.rb +++ b/lib/json/ld/expand.rb @@ -1,761 +1,840 @@ -# -*- encoding: utf-8 -*- # frozen_string_literal: true + require 'set' -module JSON::LD - ## - # Expand module, used as part of API - module Expand - include Utils +module JSON + module LD + ## + # Expand module, used as part of API + module Expand + include Utils + + # The following constant is used to reduce object allocations + CONTAINER_INDEX_ID_TYPE = Set['@index', '@id', '@type'].freeze + KEY_ID = %w[@id].freeze + KEYS_VALUE_LANGUAGE_TYPE_INDEX_DIRECTION = %w[@value @language @type @index @direction @annotation].freeze + KEYS_SET_LIST_INDEX = %w[@set @list @index].freeze + KEYS_INCLUDED_TYPE_REVERSE = %w[@included @type @reverse].freeze + + ## + # Expand an Array or Object given an active context and performing local context expansion. 
+ # + # @param [Array, Hash] input + # @param [String] active_property + # @param [Context] context + # @param [Boolean] framing (false) + # Special rules for expanding a frame + # @param [Boolean] from_map + # Expanding from a map, which could be an `@type` map, so don't clear out context term definitions + # + # @return [Array Object}>] + def expand(input, active_property, context, + framing: false, from_map: false, log_depth: nil) + # log_debug("expand", depth: log_depth.to_i) {"input: #{input.inspect}, active_property: #{active_property.inspect}, context: #{context.inspect}"} + framing = false if active_property == '@default' + if active_property + expanded_active_property = context.expand_iri(active_property, vocab: true, as_string: true, + base: @options[:base]) + end - # The following constant is used to reduce object allocations - CONTAINER_INDEX_ID_TYPE = Set['@index', '@id', '@type'].freeze - KEY_ID = %w(@id).freeze - KEYS_VALUE_LANGUAGE_TYPE_INDEX_DIRECTION = %w(@value @language @type @index @direction @annotation).freeze - KEYS_SET_LIST_INDEX = %w(@set @list @index).freeze - KEYS_INCLUDED_TYPE_REVERSE = %w(@included @type @reverse).freeze + # Use a term-specific context, if defined, based on the non-type-scoped context. + if active_property && context.term_definitions[active_property] + property_scoped_context = context.term_definitions[active_property].context + end + # log_debug("expand", depth: log_depth.to_i) {"property_scoped_context: #{property_scoped_context.inspect}"} unless property_scoped_context.nil? + + case input + when Array + # If element is an array, + is_list = context.container(active_property).include?('@list') + input.each_with_object([]) do |v, memo| + # Initialize expanded item to the result of using this algorithm recursively, passing active context, active property, and item as element. 
+ v = expand(v, active_property, context, + framing: framing, + from_map: from_map, + log_depth: log_depth.to_i + 1) - ## - # Expand an Array or Object given an active context and performing local context expansion. - # - # @param [Array, Hash] input - # @param [String] active_property - # @param [Context] context - # @param [Boolean] framing (false) - # Special rules for expanding a frame - # @param [Boolean] from_map - # Expanding from a map, which could be an `@type` map, so don't clear out context term definitions - # - # @return [Array Object}>] - def expand(input, active_property, context, - framing: false, from_map: false, log_depth: nil) - # log_debug("expand", depth: log_depth.to_i) {"input: #{input.inspect}, active_property: #{active_property.inspect}, context: #{context.inspect}"} - framing = false if active_property == '@default' - expanded_active_property = context.expand_iri(active_property, vocab: true, as_string: true, base: @options[:base]) if active_property - - # Use a term-specific context, if defined, based on the non-type-scoped context. - property_scoped_context = context.term_definitions[active_property].context if active_property && context.term_definitions[active_property] - # log_debug("expand", depth: log_depth.to_i) {"property_scoped_context: #{property_scoped_context.inspect}"} unless property_scoped_context.nil? - - result = case input - when Array - # If element is an array, - is_list = context.container(active_property).include?('@list') - value = input.each_with_object([]) do |v, memo| - # Initialize expanded item to the result of using this algorithm recursively, passing active context, active property, and item as element. 
- v = expand(v, active_property, context, - framing: framing, - from_map: from_map, - log_depth: log_depth.to_i + 1) + # If the active property is @list or its container mapping is set to @list and v is an array, change it to a list object + if is_list && v.is_a?(Array) + # Make sure that no member of v contains an annotation object + if v.any? { |n| n.is_a?(Hash) && n.key?('@annotation') } + raise JsonLdError::InvalidAnnotation, + "A list element must not contain @annotation." + end + v = { "@list" => v } + end - # If the active property is @list or its container mapping is set to @list and v is an array, change it to a list object - if is_list && v.is_a?(Array) - # Make sure that no member of v contains an annotation object - raise JsonLdError::InvalidAnnotation, - "A list element must not contain @annotation." if - v.any? {|n| n.is_a?(Hash) && n.key?('@annotation')} - v = {"@list" => v} + case v + when nil then nil + when Array then memo.concat(v) + else memo << v + end end - case v - when nil then nil - when Array then memo.concat(v) - else memo << v + when Hash + if context.previous_context + expanded_key_map = input.keys.inject({}) do |memo, key| + memo.merge(key => context.expand_iri(key, vocab: true, as_string: true, base: @options[:base])) + end + # Revert any previously type-scoped term definitions, unless this is from a map, a value object or a subject reference + revert_context = !from_map && + !expanded_key_map.value?('@value') && + expanded_key_map.values != ['@id'] + + # If there's a previous context, the context was type-scoped + # log_debug("expand", depth: log_depth.to_i) {"previous_context: #{context.previous_context.inspect}"} if revert_context + context = context.previous_context if revert_context end - end - value - when Hash - if context.previous_context - expanded_key_map = input.keys.inject({}) do |memo, key| - memo.merge(key => context.expand_iri(key, vocab: true, as_string: true, base: @options[:base])) + # Apply property-scoped context 
after reverting term-scoped context + unless property_scoped_context.nil? + context = context.parse(property_scoped_context, base: @options[:base], override_protected: true) end - # Revert any previously type-scoped term definitions, unless this is from a map, a value object or a subject reference - revert_context = !from_map && - !expanded_key_map.values.include?('@value') && - !(expanded_key_map.values == ['@id']) - - # If there's a previous context, the context was type-scoped - # log_debug("expand", depth: log_depth.to_i) {"previous_context: #{context.previous_context.inspect}"} if revert_context - context = context.previous_context if revert_context - end + # log_debug("expand", depth: log_depth.to_i) {"after property_scoped_context: #{context.inspect}"} unless property_scoped_context.nil? - # Apply property-scoped context after reverting term-scoped context - unless property_scoped_context.nil? - context = context.parse(property_scoped_context, base: @options[:base], override_protected: true) - end - # log_debug("expand", depth: log_depth.to_i) {"after property_scoped_context: #{context.inspect}"} unless property_scoped_context.nil? - - # If element contains the key @context, set active context to the result of the Context Processing algorithm, passing active context and the value of the @context key as local context. - if input.key?('@context') - context = context.parse(input['@context'], base: @options[:base]) - # log_debug("expand", depth: log_depth.to_i) {"context: #{context.inspect}"} - end - - # Set the type-scoped context to the context on input, for use later - type_scoped_context = context + # If element contains the key @context, set active context to the result of the Context Processing algorithm, passing active context and the value of the @context key as local context. 
+ if input.key?('@context') + context = context.parse(input['@context'], base: @options[:base]) + # log_debug("expand", depth: log_depth.to_i) {"context: #{context.inspect}"} + end - output_object = {} + # Set the type-scoped context to the context on input, for use later + type_scoped_context = context - # See if keys mapping to @type have terms with a local context - type_key = nil - (input.keys - %w(@context)).sort. - select {|k| context.expand_iri(k, vocab: true, base: @options[:base]) == '@type'}. - each do |tk| + output_object = {} - type_key ||= tk # Side effect saves the first found key mapping to @type - Array(input[tk]).sort.each do |term| - term_context = type_scoped_context.term_definitions[term].context if type_scoped_context.term_definitions[term] - unless term_context.nil? - # log_debug("expand", depth: log_depth.to_i) {"term_context[#{term}]: #{term_context.inspect}"} - context = context.parse(term_context, base: @options[:base], propagate: false) + # See if keys mapping to @type have terms with a local context + type_key = nil + (input.keys - %w[@context]).sort + .select { |k| context.expand_iri(k, vocab: true, base: @options[:base]) == '@type' } + .each do |tk| + type_key ||= tk # Side effect saves the first found key mapping to @type + Array(input[tk]).sort.each do |term| + if type_scoped_context.term_definitions[term] + term_context = type_scoped_context.term_definitions[term].context + end + unless term_context.nil? + # log_debug("expand", depth: log_depth.to_i) {"term_context[#{term}]: #{term_context.inspect}"} + context = context.parse(term_context, base: @options[:base], propagate: false) + end end end - end - # Process each key and value in element. 
Ignores @nesting content - expand_object(input, active_property, context, output_object, - expanded_active_property: expanded_active_property, - framing: framing, - type_key: type_key, - type_scoped_context: type_scoped_context, - log_depth: log_depth.to_i + 1) - - # log_debug("output object", depth: log_depth.to_i) {output_object.inspect} - - # If result contains the key @value: - if value?(output_object) - keys = output_object.keys - unless (keys - KEYS_VALUE_LANGUAGE_TYPE_INDEX_DIRECTION).empty? - # The result must not contain any keys other than @direction, @value, @language, @type, and @index. It must not contain both the @language key and the @type key. Otherwise, an invalid value object error has been detected and processing is aborted. - raise JsonLdError::InvalidValueObject, - "value object has unknown keys: #{output_object.inspect}" - end + # Process each key and value in element. Ignores @nesting content + expand_object(input, active_property, context, output_object, + expanded_active_property: expanded_active_property, + framing: framing, + type_key: type_key, + type_scoped_context: type_scoped_context, + log_depth: log_depth.to_i + 1) - if keys.include?('@type') && !(keys & %w(@language @direction)).empty? - # @type is inconsistent with either @language or @direction - raise JsonLdError::InvalidValueObject, - "value object must not include @type with either @language or @direction: #{output_object.inspect}" - end + # log_debug("output object", depth: log_depth.to_i) {output_object.inspect} + + # If result contains the key @value: + if value?(output_object) + keys = output_object.keys + unless (keys - KEYS_VALUE_LANGUAGE_TYPE_INDEX_DIRECTION).empty? + # The result must not contain any keys other than @direction, @value, @language, @type, and @index. It must not contain both the @language key and the @type key. Otherwise, an invalid value object error has been detected and processing is aborted. 
+ raise JsonLdError::InvalidValueObject, + "value object has unknown keys: #{output_object.inspect}" + end - output_object.delete('@language') if output_object.key?('@language') && Array(output_object['@language']).empty? - type_is_json = output_object['@type'] == '@json' - output_object.delete('@type') if output_object.key?('@type') && Array(output_object['@type']).empty? - - # If the value of result's @value key is null, then set result to null and @type is not @json. - ary = Array(output_object['@value']) - return nil if ary.empty? && !type_is_json - - if output_object['@type'] == '@json' && context.processingMode('json-ld-1.1') - # Any value of @value is okay if @type: @json - elsif !ary.all? {|v| v.is_a?(String) || v.is_a?(Hash) && v.empty?} && output_object.key?('@language') - # Otherwise, if the value of result's @value member is not a string and result contains the key @language, an invalid language-tagged value error has been detected (only strings can be language-tagged) and processing is aborted. - raise JsonLdError::InvalidLanguageTaggedValue, - "when @language is used, @value must be a string: #{output_object.inspect}" - elsif output_object['@type'] && - (!Array(output_object['@type']).all? {|t| - t.is_a?(String) && RDF::URI(t).valid? && !t.start_with?('_:') || - t.is_a?(Hash) && t.empty?} || - !framing && !output_object['@type'].is_a?(String)) - # Otherwise, if the result has a @type member and its value is not an IRI, an invalid typed value error has been detected and processing is aborted. - raise JsonLdError::InvalidTypedValue, - "value of @type must be an IRI or '@json': #{output_object.inspect}" - elsif !framing && !output_object.fetch('@type', '').is_a?(String) && + if keys.include?('@type') && !(keys & %w[@language @direction]).empty? 
+ # @type is inconsistent with either @language or @direction + raise JsonLdError::InvalidValueObject, + "value object must not include @type with either @language or @direction: #{output_object.inspect}" + end + + if output_object.key?('@language') && Array(output_object['@language']).empty? + output_object.delete('@language') + end + type_is_json = output_object['@type'] == '@json' + output_object.delete('@type') if output_object.key?('@type') && Array(output_object['@type']).empty? + + # If the value of result's @value key is null, then set result to null and @type is not @json. + ary = Array(output_object['@value']) + return nil if ary.empty? && !type_is_json + + if output_object['@type'] == '@json' && context.processingMode('json-ld-1.1') + # Any value of @value is okay if @type: @json + elsif !ary.all? { |v| v.is_a?(String) || (v.is_a?(Hash) && v.empty?) } && output_object.key?('@language') + # Otherwise, if the value of result's @value member is not a string and result contains the key @language, an invalid language-tagged value error has been detected (only strings can be language-tagged) and processing is aborted. + raise JsonLdError::InvalidLanguageTaggedValue, + "when @language is used, @value must be a string: #{output_object.inspect}" + elsif output_object['@type'] && + (!Array(output_object['@type']).all? do |t| + (t.is_a?(String) && RDF::URI(t).valid? && !t.start_with?('_:')) || + (t.is_a?(Hash) && t.empty?) + end || + (!framing && !output_object['@type'].is_a?(String))) + # Otherwise, if the result has a @type member and its value is not an IRI, an invalid typed value error has been detected and processing is aborted. + raise JsonLdError::InvalidTypedValue, + "value of @type must be an IRI or '@json': #{output_object.inspect}" + elsif !framing && !output_object.fetch('@type', '').is_a?(String) && RDF::URI(t).valid? 
&& !t.start_with?('_:') - # Otherwise, if the result has a @type member and its value is not an IRI, an invalid typed value error has been detected and processing is aborted. - raise JsonLdError::InvalidTypedValue, - "value of @type must be an IRI or '@json': #{output_object.inspect}" + # Otherwise, if the result has a @type member and its value is not an IRI, an invalid typed value error has been detected and processing is aborted. + raise JsonLdError::InvalidTypedValue, + "value of @type must be an IRI or '@json': #{output_object.inspect}" + end + elsif !output_object.fetch('@type', []).is_a?(Array) + # Otherwise, if result contains the key @type and its associated value is not an array, set it to an array containing only the associated value. + output_object['@type'] = [output_object['@type']] + elsif output_object.key?('@set') || output_object.key?('@list') + # Otherwise, if result contains the key @set or @list: + # The result must contain at most one other key and that key must be @index. Otherwise, an invalid set or list object error has been detected and processing is aborted. + unless (output_object.keys - KEYS_SET_LIST_INDEX).empty? + raise JsonLdError::InvalidSetOrListObject, + "@set or @list may only contain @index: #{output_object.keys.inspect}" + end + + # If result contains the key @set, then set result to the key's associated value. + return output_object['@set'] if output_object.key?('@set') + elsif output_object['@annotation'] + # Otherwise, if result contains the key @annotation, + # the array value must all be node objects without an @id property, otherwise, an invalid annotation error has been detected and processing is aborted. + unless output_object['@annotation'].all? { |o| node?(o) && !o.key?('@id') } + raise JsonLdError::InvalidAnnotation, + "@annotation must reference node objects without @id." + end + + # Additionally, the property must not be used if there is no active property, or the expanded active property is @graph. 
+ if %w[@graph @included].include?(expanded_active_property || '@graph') + raise JsonLdError::InvalidAnnotation, + "@annotation must not be used on a top-level object." + end + end - elsif !output_object.fetch('@type', []).is_a?(Array) - # Otherwise, if result contains the key @type and its associated value is not an array, set it to an array containing only the associated value. - output_object['@type'] = [output_object['@type']] - elsif output_object.key?('@set') || output_object.key?('@list') - # Otherwise, if result contains the key @set or @list: - # The result must contain at most one other key and that key must be @index. Otherwise, an invalid set or list object error has been detected and processing is aborted. - raise JsonLdError::InvalidSetOrListObject, - "@set or @list may only contain @index: #{output_object.keys.inspect}" unless - (output_object.keys - KEYS_SET_LIST_INDEX).empty? - - # If result contains the key @set, then set result to the key's associated value. - return output_object['@set'] if output_object.key?('@set') - elsif output_object['@annotation'] - # Otherwise, if result contains the key @annotation, - # the array value must all be node objects without an @id property, otherwise, an invalid annotation error has been detected and processing is aborted. - raise JsonLdError::InvalidAnnotation, - "@annotation must reference node objects without @id." unless - output_object['@annotation'].all? {|o| node?(o) && !o.key?('@id')} - - # Additionally, the property must not be used if there is no active property, or the expanded active property is @graph. - raise JsonLdError::InvalidAnnotation, - "@annotation must not be used on a top-level object." if - %w(@graph @included).include?(expanded_active_property || '@graph') - - end - # If result contains only the key @language, set result to null. - return nil if output_object.length == 1 && output_object.key?('@language') + # If result contains only the key @language, set result to null. 
+ return nil if output_object.length == 1 && output_object.key?('@language') - # If active property is null or @graph, drop free-floating values as follows: - if (expanded_active_property || '@graph') == '@graph' && - (output_object.key?('@value') || output_object.key?('@list') || - (output_object.keys - KEY_ID).empty? && !framing) - # log_debug(" =>", depth: log_depth.to_i) { "empty top-level: " + output_object.inspect} - return nil - end + # If active property is null or @graph, drop free-floating values as follows: + if (expanded_active_property || '@graph') == '@graph' && + (output_object.key?('@value') || output_object.key?('@list') || + ((output_object.keys - KEY_ID).empty? && !framing)) + # log_debug(" =>", depth: log_depth.to_i) { "empty top-level: " + output_object.inspect} + return nil + end - # Re-order result keys if ordering - if @options[:ordered] - output_object.keys.sort.each_with_object({}) {|kk, memo| memo[kk] = output_object[kk]} + # Re-order result keys if ordering + if @options[:ordered] + output_object.keys.sort.each_with_object({}) { |kk, memo| memo[kk] = output_object[kk] } + else + output_object + end else - output_object - end - else - # Otherwise, unless the value is a number, expand the value according to the Value Expansion rules, passing active property. - return nil if input.nil? || active_property.nil? || expanded_active_property == '@graph' - - # Apply property-scoped context - unless property_scoped_context.nil? - context = context.parse(property_scoped_context, - base: @options[:base], - override_protected: true) + # Otherwise, unless the value is a number, expand the value according to the Value Expansion rules, passing active property. + return nil if input.nil? || active_property.nil? || expanded_active_property == '@graph' + + # Apply property-scoped context + unless property_scoped_context.nil? 
+ context = context.parse(property_scoped_context, + base: @options[:base], + override_protected: true) + end + # log_debug("expand", depth: log_depth.to_i) {"property_scoped_context: #{context.inspect}"} unless property_scoped_context.nil? + + context.expand_value(active_property, input, base: @options[:base]) end - # log_debug("expand", depth: log_depth.to_i) {"property_scoped_context: #{context.inspect}"} unless property_scoped_context.nil? - context.expand_value(active_property, input, base: @options[:base]) + # log_debug(depth: log_depth.to_i) {" => #{result.inspect}"} end - # log_debug(depth: log_depth.to_i) {" => #{result.inspect}"} - result - end + private - private - - # Expand each key and value of element adding them to result - def expand_object(input, active_property, context, output_object, - expanded_active_property:, - framing:, - type_key:, - type_scoped_context:, - log_depth: nil) - nests = [] - - input_type = Array(input[type_key]).last - input_type = context.expand_iri(input_type, vocab: true, as_string: true, base: @options[:base]) if input_type - - # Then, proceed and process each property and value in element as follows: - keys = @options[:ordered] ? input.keys.sort : input.keys - keys.each do |key| - # For each key and value in element, ordered lexicographically by key: - value = input[key] - expanded_property = context.expand_iri(key, vocab: true, base: @options[:base]) - - # If expanded property is null or it neither contains a colon (:) nor it is a keyword, drop key by continuing to the next key. - next if expanded_property.is_a?(RDF::URI) && expanded_property.relative? - expanded_property = expanded_property.to_s if expanded_property.is_a?(RDF::Resource) - - warn "[DEPRECATION] Blank Node properties deprecated in JSON-LD 1.1." 
if - @options[:validate] && - expanded_property.to_s.start_with?("_:") && - context.processingMode('json-ld-1.1') - - # log_debug("expand property", depth: log_depth.to_i) {"ap: #{active_property.inspect}, expanded: #{expanded_property.inspect}, value: #{value.inspect}"} - - if expanded_property.nil? - # log_debug(" => ", depth: log_depth.to_i) {"skip nil property"} - next - end + # Expand each key and value of element adding them to result + def expand_object(input, active_property, context, output_object, + expanded_active_property:, + framing:, + type_key:, + type_scoped_context:, + log_depth: nil) + nests = [] - if KEYWORDS.include?(expanded_property) - # If active property equals @reverse, an invalid reverse property map error has been detected and processing is aborted. - raise JsonLdError::InvalidReversePropertyMap, - "@reverse not appropriate at this point" if expanded_active_property == '@reverse' - - # If result has already an expanded property member (other than @type), an colliding keywords error has been detected and processing is aborted. - raise JsonLdError::CollidingKeywords, - "#{expanded_property} already exists in result" if output_object.key?(expanded_property) && !KEYS_INCLUDED_TYPE_REVERSE.include?(expanded_property) - - expanded_value = case expanded_property - when '@id' - # If expanded active property is `@annotation`, an invalid annotation error has been found and processing is aborted. 
- raise JsonLdError::InvalidAnnotation, - "an annotation must not contain a property expanding to @id" if - expanded_active_property == '@annotation' && @options[:rdfstar] - - # If expanded property is @id and value is not a string, an invalid @id value error has been detected and processing is aborted - e_id = case value - when String - context.expand_iri(value, as_string: true, base: @options[:base], documentRelative: true) - when Array - # Framing allows an array of IRIs, and always puts values in an array - raise JsonLdError::InvalidIdValue, - "value of @id must be a string unless framing: #{value.inspect}" unless framing - context.expand_iri(value, as_string: true, base: @options[:base], documentRelative: true) - value.map do |v| - raise JsonLdError::InvalidTypeValue, - "@id value must be a string or array of strings for framing: #{v.inspect}" unless v.is_a?(String) - context.expand_iri(v, as_string: true, base: @options[:base], documentRelative: true) + input_type = Array(input[type_key]).last + input_type = context.expand_iri(input_type, vocab: true, as_string: true, base: @options[:base]) if input_type + + # Then, proceed and process each property and value in element as follows: + keys = @options[:ordered] ? input.keys.sort : input.keys + keys.each do |key| + # For each key and value in element, ordered lexicographically by key: + value = input[key] + expanded_property = context.expand_iri(key, vocab: true, base: @options[:base]) + + # If expanded property is null or it neither contains a colon (:) nor it is a keyword, drop key by continuing to the next key. + next if expanded_property.is_a?(RDF::URI) && expanded_property.relative? + + expanded_property = expanded_property.to_s if expanded_property.is_a?(RDF::Resource) + + warn "[DEPRECATION] Blank Node properties deprecated in JSON-LD 1.1." 
if + @options[:validate] && + expanded_property.to_s.start_with?("_:") && + context.processingMode('json-ld-1.1') + + # log_debug("expand property", depth: log_depth.to_i) {"ap: #{active_property.inspect}, expanded: #{expanded_property.inspect}, value: #{value.inspect}"} + + if expanded_property.nil? + # log_debug(" => ", depth: log_depth.to_i) {"skip nil property"} + next + end + + if KEYWORDS.include?(expanded_property) + # If active property equals @reverse, an invalid reverse property map error has been detected and processing is aborted. + if expanded_active_property == '@reverse' + raise JsonLdError::InvalidReversePropertyMap, + "@reverse not appropriate at this point" + end + + # If result has already an expanded property member (other than @type), an colliding keywords error has been detected and processing is aborted. + if output_object.key?(expanded_property) && !KEYS_INCLUDED_TYPE_REVERSE.include?(expanded_property) + raise JsonLdError::CollidingKeywords, + "#{expanded_property} already exists in result" + end + + expanded_value = case expanded_property + when '@id' + # If expanded active property is `@annotation`, an invalid annotation error has been found and processing is aborted. + if expanded_active_property == '@annotation' && @options[:rdfstar] + raise JsonLdError::InvalidAnnotation, + "an annotation must not contain a property expanding to @id" end - when Hash - if framing - raise JsonLdError::InvalidTypeValue, - "value of @id must be a an empty object for framing: #{value.inspect}" unless - value.empty? 
- [{}] - elsif @options[:rdfstar] - # Result must have just a single statement - rei_node = expand(value, nil, context, log_depth: log_depth.to_i + 1) - - # Node must not contain @reverse - raise JsonLdError::InvalidEmbeddedNode, - "Embedded node with @reverse" if rei_node && rei_node.key?('@reverse') - statements = to_enum(:item_to_rdf, rei_node) - raise JsonLdError::InvalidEmbeddedNode, - "Embedded node with #{statements.size} statements" unless - statements.count == 1 - rei_node + + # If expanded property is @id and value is not a string, an invalid @id value error has been detected and processing is aborted + e_id = case value + when String + context.expand_iri(value, as_string: true, base: @options[:base], documentRelative: true) + when Array + # Framing allows an array of IRIs, and always puts values in an array + unless framing + raise JsonLdError::InvalidIdValue, + "value of @id must be a string unless framing: #{value.inspect}" + end + context.expand_iri(value, as_string: true, base: @options[:base], documentRelative: true) + value.map do |v| + unless v.is_a?(String) + raise JsonLdError::InvalidTypeValue, + "@id value must be a string or array of strings for framing: #{v.inspect}" + end + context.expand_iri(v, as_string: true, base: @options[:base], documentRelative: true) + end + when Hash + if framing + unless value.empty? 
+ raise JsonLdError::InvalidTypeValue, + "value of @id must be a an empty object for framing: #{value.inspect}" + end + [{}] + elsif @options[:rdfstar] + # Result must have just a single statement + rei_node = expand(value, nil, context, log_depth: log_depth.to_i + 1) + + # Node must not contain @reverse + if rei_node&.key?('@reverse') + raise JsonLdError::InvalidEmbeddedNode, + "Embedded node with @reverse" + end + statements = to_enum(:item_to_rdf, rei_node) + unless statements.count == 1 + raise JsonLdError::InvalidEmbeddedNode, + "Embedded node with #{statements.size} statements" + end + rei_node + else + unless framing + raise JsonLdError::InvalidIdValue, + "value of @id must be a string unless framing: #{value.inspect}" + end + end else raise JsonLdError::InvalidIdValue, - "value of @id must be a string unless framing: #{value.inspect}" unless framing + "value of @id must be a string or hash if framing: #{value.inspect}" end - else - raise JsonLdError::InvalidIdValue, - "value of @id must be a string or hash if framing: #{value.inspect}" - end - # Use array form if framing - if framing - as_array(e_id) - else - e_id - end - when '@included' - # Included blocks are treated as an array of separate object nodes sharing the same referencing active_property. For 1.0, it is skipped as are other unknown keywords - next if context.processingMode('json-ld-1.0') - included_result = as_array(expand(value, active_property, context, - framing: framing, - log_depth: log_depth.to_i + 1)) - - # Expanded values must be node objects - raise JsonLdError::InvalidIncludedValue, "values of @included must expand to node objects" unless included_result.all? 
{|e| node?(e)} - # As other properties may alias to @included, add this to any other previously expanded values - Array(output_object['@included']) + included_result - when '@type' - # If expanded property is @type and value is neither a string nor an array of strings, an invalid type value error has been detected and processing is aborted. Otherwise, set expanded value to the result of using the IRI Expansion algorithm, passing active context, true for vocab, and true for document relative to expand the value or each of its items. - # log_debug("@type", depth: log_depth.to_i) {"value: #{value.inspect}"} - e_type = case value - when Array - value.map do |v| - raise JsonLdError::InvalidTypeValue, - "@type value must be a string or array of strings: #{v.inspect}" unless v.is_a?(String) - type_scoped_context.expand_iri(v, - as_string: true, - base: @options[:base], - documentRelative: true, - vocab: true) + # Use array form if framing + if framing + as_array(e_id) + else + e_id end - when String - type_scoped_context.expand_iri(value, - as_string: true, - base: @options[:base], - documentRelative: true, - vocab: true) - when Hash - if !framing - raise JsonLdError::InvalidTypeValue, - "@type value must be a string or array of strings: #{value.inspect}" - elsif value.keys.length == 1 && - type_scoped_context.expand_iri(value.keys.first, vocab: true, base: @options[:base]) == '@default' - # Expand values of @default, which must be a string, or array of strings expanding to IRIs - [{'@default' => Array(value['@default']).map do |v| - raise JsonLdError::InvalidTypeValue, - "@type default value must be a string or array of strings: #{v.inspect}" unless v.is_a?(String) + when '@included' + # Included blocks are treated as an array of separate object nodes sharing the same referencing active_property. 
For 1.0, it is skipped as are other unknown keywords + next if context.processingMode('json-ld-1.0') + + included_result = as_array(expand(value, active_property, context, + framing: framing, + log_depth: log_depth.to_i + 1)) + + # Expanded values must be node objects + unless included_result.all? do |e| + node?(e) + end + raise JsonLdError::InvalidIncludedValue, + "values of @included must expand to node objects" + end + + # As other properties may alias to @included, add this to any other previously expanded values + Array(output_object['@included']) + included_result + when '@type' + # If expanded property is @type and value is neither a string nor an array of strings, an invalid type value error has been detected and processing is aborted. Otherwise, set expanded value to the result of using the IRI Expansion algorithm, passing active context, true for vocab, and true for document relative to expand the value or each of its items. + # log_debug("@type", depth: log_depth.to_i) {"value: #{value.inspect}"} + e_type = case value + when Array + value.map do |v| + unless v.is_a?(String) + raise JsonLdError::InvalidTypeValue, + "@type value must be a string or array of strings: #{v.inspect}" + end type_scoped_context.expand_iri(v, as_string: true, base: @options[:base], documentRelative: true, vocab: true) - end}] - elsif !value.empty? 
- raise JsonLdError::InvalidTypeValue, - "@type value must be a an empty object for framing: #{value.inspect}" + end + when String + type_scoped_context.expand_iri(value, + as_string: true, + base: @options[:base], + documentRelative: true, + vocab: true) + when Hash + if !framing + raise JsonLdError::InvalidTypeValue, + "@type value must be a string or array of strings: #{value.inspect}" + elsif value.keys.length == 1 && + type_scoped_context.expand_iri(value.keys.first, vocab: true, base: @options[:base]) == '@default' + # Expand values of @default, which must be a string, or array of strings expanding to IRIs + [{ '@default' => Array(value['@default']).map do |v| + unless v.is_a?(String) + raise JsonLdError::InvalidTypeValue, + "@type default value must be a string or array of strings: #{v.inspect}" + end + type_scoped_context.expand_iri(v, + as_string: true, + base: @options[:base], + documentRelative: true, + vocab: true) + end }] + elsif !value.empty? + raise JsonLdError::InvalidTypeValue, + "@type value must be a an empty object for framing: #{value.inspect}" + else + [{}] + end else - [{}] + raise JsonLdError::InvalidTypeValue, + "@type value must be a string or array of strings: #{value.inspect}" end - else - raise JsonLdError::InvalidTypeValue, - "@type value must be a string or array of strings: #{value.inspect}" - end - e_type = Array(output_object['@type']) + Array(e_type) - # Use array form if framing - framing || e_type.length > 1 ? e_type : e_type.first - when '@graph' - # If expanded property is @graph, set expanded value to the result of using this algorithm recursively passing active context, @graph for active property, and value for element. - value = expand(value, '@graph', context, - framing: framing, - log_depth: log_depth.to_i + 1) - as_array(value) - when '@value' - # If expanded property is @value and input contains @type: json, accept any value. 
- # If expanded property is @value and value is not a scalar or null, an invalid value object value error has been detected and processing is aborted. (In 1.1, @value can have any JSON value of @type is @json or the property coerces to @json). - # Otherwise, set expanded value to value. If expanded value is null, set the @value member of result to null and continue with the next key from element. Null values need to be preserved in this case as the meaning of an @type member depends on the existence of an @value member. - # If framing, always use array form, unless null - if input_type == '@json' && context.processingMode('json-ld-1.1') - value - else + e_type = Array(output_object['@type']) + Array(e_type) + # Use array form if framing + framing || e_type.length > 1 ? e_type : e_type.first + when '@graph' + # If expanded property is @graph, set expanded value to the result of using this algorithm recursively passing active context, @graph for active property, and value for element. + value = expand(value, '@graph', context, + framing: framing, + log_depth: log_depth.to_i + 1) + as_array(value) + when '@value' + # If expanded property is @value and input contains @type: json, accept any value. + # If expanded property is @value and value is not a scalar or null, an invalid value object value error has been detected and processing is aborted. (In 1.1, @value can have any JSON value of @type is @json or the property coerces to @json). + # Otherwise, set expanded value to value. If expanded value is null, set the @value member of result to null and continue with the next key from element. Null values need to be preserved in this case as the meaning of an @type member depends on the existence of an @value member. + # If framing, always use array form, unless null + if input_type == '@json' && context.processingMode('json-ld-1.1') + value + else + case value + when String, TrueClass, FalseClass, Numeric then (framing ? 
[value] : value) + when nil + output_object['@value'] = nil + next + when Array + unless framing + raise JsonLdError::InvalidValueObjectValue, + "@value value may not be an array unless framing: #{value.inspect}" + end + value + when Hash + unless value.empty? && framing + raise JsonLdError::InvalidValueObjectValue, + "@value value must be a an empty object for framing: #{value.inspect}" + end + [value] + else + raise JsonLdError::InvalidValueObjectValue, + "Value of #{expanded_property} must be a scalar or null: #{value.inspect}" + end + end + when '@language' + # If expanded property is @language and value is not a string, an invalid language-tagged string error has been detected and processing is aborted. Otherwise, set expanded value to lowercased value. + # If framing, always use array form, unless null case value - when String, TrueClass, FalseClass, Numeric then (framing ? [value] : value) - when nil - output_object['@value'] = nil - next; + when String + unless /^[a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})*$/.match?(value) + warn "@language must be valid BCP47: #{value.inspect}" + end + if @options[:lowercaseLanguage] + (framing ? [value.downcase] : value.downcase) + else + (framing ? [value] : value) + end when Array - raise JsonLdError::InvalidValueObjectValue, - "@value value may not be an array unless framing: #{value.inspect}" unless framing - value + unless framing + raise JsonLdError::InvalidLanguageTaggedString, + "@language value may not be an array unless framing: #{value.inspect}" + end + value.each do |v| + unless /^[a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})*$/.match?(v) + warn "@language must be valid BCP47: #{v.inspect}" + end + end + @options[:lowercaseLanguage] ? value.map(&:downcase) : value when Hash - raise JsonLdError::InvalidValueObjectValue, - "@value value must be a an empty object for framing: #{value.inspect}" unless - value.empty? && framing + unless value.empty? 
&& framing + raise JsonLdError::InvalidLanguageTaggedString, + "@language value must be a an empty object for framing: #{value.inspect}" + end [value] else - raise JsonLdError::InvalidValueObjectValue, - "Value of #{expanded_property} must be a scalar or null: #{value.inspect}" - end - end - when '@language' - # If expanded property is @language and value is not a string, an invalid language-tagged string error has been detected and processing is aborted. Otherwise, set expanded value to lowercased value. - # If framing, always use array form, unless null - case value - when String - if value !~ /^[a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})*$/ - warn "@language must be valid BCP47: #{value.inspect}" + raise JsonLdError::InvalidLanguageTaggedString, + "Value of #{expanded_property} must be a string: #{value.inspect}" end - if @options[:lowercaseLanguage] - (framing ? [value.downcase] : value.downcase) + when '@direction' + # If expanded property is @direction and value is not either 'ltr' or 'rtl', an invalid base direction error has been detected and processing is aborted. Otherwise, set expanded value to value. + # If framing, always use array form, unless null + case value + when 'ltr', 'rtl' then (framing ? [value] : value) + when Array + unless framing + raise JsonLdError::InvalidBaseDirection, + "@direction value may not be an array unless framing: #{value.inspect}" + end + unless value.all? do |v| + %w[ + ltr rtl + ].include?(v) || (v.is_a?(Hash) && v.empty?) + end + raise JsonLdError::InvalidBaseDirection, + "@direction must be one of 'ltr', 'rtl', or an array of those if framing #{value.inspect}" + end + value + when Hash + unless value.empty? && framing + raise JsonLdError::InvalidBaseDirection, + "@direction value must be a an empty object for framing: #{value.inspect}" + end + [value] else - (framing ? 
[value] : value) + raise JsonLdError::InvalidBaseDirection, + "Value of #{expanded_property} must be one of 'ltr' or 'rtl': #{value.inspect}" end - when Array - raise JsonLdError::InvalidLanguageTaggedString, - "@language value may not be an array unless framing: #{value.inspect}" unless framing - value.each do |v| - if v !~ /^[a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})*$/ - warn "@language must be valid BCP47: #{v.inspect}" - end + when '@index' + # If expanded property is @index and value is not a string, an invalid @index value error has been detected and processing is aborted. Otherwise, set expanded value to value. + unless value.is_a?(String) + raise JsonLdError::InvalidIndexValue, + "Value of @index is not a string: #{value.inspect}" end - @options[:lowercaseLanguage] ? value.map(&:downcase) : value - when Hash - raise JsonLdError::InvalidLanguageTaggedString, - "@language value must be a an empty object for framing: #{value.inspect}" unless - value.empty? && framing - [value] - else - raise JsonLdError::InvalidLanguageTaggedString, - "Value of #{expanded_property} must be a string: #{value.inspect}" - end - when '@direction' - # If expanded property is @direction and value is not either 'ltr' or 'rtl', an invalid base direction error has been detected and processing is aborted. Otherwise, set expanded value to value. - # If framing, always use array form, unless null - case value - when 'ltr', 'rtl' then (framing ? [value] : value) - when Array - raise JsonLdError::InvalidBaseDirection, - "@direction value may not be an array unless framing: #{value.inspect}" unless framing - raise JsonLdError::InvalidBaseDirection, - "@direction must be one of 'ltr', 'rtl', or an array of those if framing #{value.inspect}" unless value.all? {|v| %w(ltr rtl).include?(v) || v.is_a?(Hash) && v.empty?} value - when Hash - raise JsonLdError::InvalidBaseDirection, - "@direction value must be a an empty object for framing: #{value.inspect}" unless - value.empty? 
&& framing - [value] - else - raise JsonLdError::InvalidBaseDirection, - "Value of #{expanded_property} must be one of 'ltr' or 'rtl': #{value.inspect}" - end - when '@index' - # If expanded property is @index and value is not a string, an invalid @index value error has been detected and processing is aborted. Otherwise, set expanded value to value. - raise JsonLdError::InvalidIndexValue, - "Value of @index is not a string: #{value.inspect}" unless value.is_a?(String) - value - when '@list' - # If expanded property is @graph: - - # If active property is null or @graph, continue with the next key from element to remove the free-floating list. - next if (expanded_active_property || '@graph') == '@graph' - - # Otherwise, initialize expanded value to the result of using this algorithm recursively passing active context, active property, and value for element. - value = expand(value, active_property, context, - framing: framing, - log_depth: log_depth.to_i + 1) + when '@list' + # If expanded property is @graph: - # Spec FIXME: need to be sure that result is an array - value = as_array(value) + # If active property is null or @graph, continue with the next key from element to remove the free-floating list. + next if (expanded_active_property || '@graph') == '@graph' - # Make sure that no member of value contains an annotation object - raise JsonLdError::InvalidAnnotation, - "A list element must not contain @annotation." if - value.any? {|n| n.is_a?(Hash) && n.key?('@annotation')} + # Otherwise, initialize expanded value to the result of using this algorithm recursively passing active context, active property, and value for element. + value = expand(value, active_property, context, + framing: framing, + log_depth: log_depth.to_i + 1) - value - when '@set' - # If expanded property is @set, set expanded value to the result of using this algorithm recursively, passing active context, active property, and value for element. 
- expand(value, active_property, context, - framing: framing, - log_depth: log_depth.to_i + 1) - when '@reverse' - # If expanded property is @reverse and value is not a JSON object, an invalid @reverse value error has been detected and processing is aborted. - raise JsonLdError::InvalidReverseValue, - "@reverse value must be an object: #{value.inspect}" unless value.is_a?(Hash) - - # Otherwise - # Initialize expanded value to the result of using this algorithm recursively, passing active context, @reverse as active property, and value as element. - value = expand(value, '@reverse', context, - framing: framing, - log_depth: log_depth.to_i + 1) + # Spec FIXME: need to be sure that result is an array + value = as_array(value) - # If expanded value contains an @reverse member, i.e., properties that are reversed twice, execute for each of its property and item the following steps: - if value.key?('@reverse') - # log_debug("@reverse", depth: log_depth.to_i) {"double reverse: #{value.inspect}"} - value['@reverse'].each do |property, item| - # If result does not have a property member, create one and set its value to an empty array. - # Append item to the value of the property member of result. - (output_object[property] ||= []).concat([item].flatten.compact) + # Make sure that no member of value contains an annotation object + if value.any? { |n| n.is_a?(Hash) && n.key?('@annotation') } + raise JsonLdError::InvalidAnnotation, + "A list element must not contain @annotation." + end + + value + when '@set' + # If expanded property is @set, set expanded value to the result of using this algorithm recursively, passing active context, active property, and value for element. + expand(value, active_property, context, + framing: framing, + log_depth: log_depth.to_i + 1) + when '@reverse' + # If expanded property is @reverse and value is not a JSON object, an invalid @reverse value error has been detected and processing is aborted. 
+ unless value.is_a?(Hash) + raise JsonLdError::InvalidReverseValue, + "@reverse value must be an object: #{value.inspect}" + end + + # Otherwise + # Initialize expanded value to the result of using this algorithm recursively, passing active context, @reverse as active property, and value as element. + value = expand(value, '@reverse', context, + framing: framing, + log_depth: log_depth.to_i + 1) + + # If expanded value contains an @reverse member, i.e., properties that are reversed twice, execute for each of its property and item the following steps: + if value.key?('@reverse') + # log_debug("@reverse", depth: log_depth.to_i) {"double reverse: #{value.inspect}"} + value['@reverse'].each do |property, item| + # If result does not have a property member, create one and set its value to an empty array. + # Append item to the value of the property member of result. + (output_object[property] ||= []).concat([item].flatten.compact) + end end - end - # If expanded value contains members other than @reverse: - if !value.key?('@reverse') || value.length > 1 - # If result does not have an @reverse member, create one and set its value to an empty JSON object. - reverse_map = output_object['@reverse'] ||= {} - value.each do |property, items| - next if property == '@reverse' - items.each do |item| - if value?(item) || list?(item) - raise JsonLdError::InvalidReversePropertyValue, - item.inspect + # If expanded value contains members other than @reverse: + if !value.key?('@reverse') || value.length > 1 + # If result does not have an @reverse member, create one and set its value to an empty JSON object. 
+ reverse_map = output_object['@reverse'] ||= {} + value.each do |property, items| + next if property == '@reverse' + + items.each do |item| + if value?(item) || list?(item) + raise JsonLdError::InvalidReversePropertyValue, + item.inspect + end + merge_value(reverse_map, property, item) end - merge_value(reverse_map, property, item) end end + + # Continue with the next key from element + next + when '@default', '@embed', '@explicit', '@omitDefault', '@preserve', '@requireAll' + next unless framing + + # Framing keywords + [expand(value, expanded_property, context, + framing: framing, + log_depth: log_depth.to_i + 1)].flatten + when '@nest' + # Add key to nests + nests << key + # Continue with the next key from element + next + when '@annotation' + # Skip unless rdfstar option is set + next unless @options[:rdfstar] + + as_array(expand(value, '@annotation', context, + framing: framing, + log_depth: log_depth.to_i + 1)) + else + # Skip unknown keyword + next end - # Continue with the next key from element - next - when '@default', '@embed', '@explicit', '@omitDefault', '@preserve', '@requireAll' - next unless framing - # Framing keywords - [expand(value, expanded_property, context, - framing: framing, - log_depth: log_depth.to_i + 1) - ].flatten - when '@nest' - # Add key to nests - nests << key - # Continue with the next key from element - next - when '@annotation' - # Skip unless rdfstar option is set - next unless @options[:rdfstar] - as_array(expand(value, '@annotation', context, - framing: framing, - log_depth: log_depth.to_i + 1)) - else - # Skip unknown keyword + # Unless expanded value is null, set the expanded property member of result to expanded value. + # log_debug("expand #{expanded_property}", depth: log_depth.to_i) { expanded_value.inspect} + unless expanded_value.nil? 
&& expanded_property == '@value' && input_type != '@json' + output_object[expanded_property] = + expanded_value + end next end - # Unless expanded value is null, set the expanded property member of result to expanded value. - # log_debug("expand #{expanded_property}", depth: log_depth.to_i) { expanded_value.inspect} - output_object[expanded_property] = expanded_value unless expanded_value.nil? && expanded_property == '@value' && input_type != '@json' - next - end + container = context.container(key) + expanded_value = if context.coerce(key) == '@json' + # In JSON-LD 1.1, values can be native JSON + { "@value" => value, "@type" => "@json" } + elsif container.include?('@language') && value.is_a?(Hash) + # Otherwise, if key's container mapping in active context is @language and value is a JSON object then value is expanded from a language map as follows: - container = context.container(key) - expanded_value = if context.coerce(key) == '@json' - # In JSON-LD 1.1, values can be native JSON - {"@value" => value, "@type" => "@json"} - elsif container.include?('@language') && value.is_a?(Hash) - # Otherwise, if key's container mapping in active context is @language and value is a JSON object then value is expanded from a language map as follows: - - # Set multilingual array to an empty array. - ary = [] - - # For each key-value pair language-language value in value, ordered lexicographically by language - keys = @options[:ordered] ? value.keys.sort : value.keys - keys.each do |k| - expanded_k = context.expand_iri(k, vocab: true, as_string: true, base: @options[:base]) - - if k !~ /^[a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})*$/ && expanded_k != '@none' - warn "@language must be valid BCP47: #{k.inspect}" - end + # Set multilingual array to an empty array. + ary = [] - [value[k]].flatten.each do |item| - # item must be a string, otherwise an invalid language map value error has been detected and processing is aborted. 
- raise JsonLdError::InvalidLanguageMapValue, - "Expected #{item.inspect} to be a string" unless item.nil? || item.is_a?(String) + # For each key-value pair language-language value in value, ordered lexicographically by language + keys = @options[:ordered] ? value.keys.sort : value.keys + keys.each do |k| + expanded_k = context.expand_iri(k, vocab: true, as_string: true, base: @options[:base]) - # Append a JSON object to expanded value that consists of two key-value pairs: (@value-item) and (@language-lowercased language). - v = {'@value' => item} - v['@language'] = (@options[:lowercaseLanguage] ? k.downcase : k) unless expanded_k == '@none' - v['@direction'] = context.direction(key) if context.direction(key) - ary << v if item - end - end + if k !~ /^[a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})*$/ && expanded_k != '@none' + warn "@language must be valid BCP47: #{k.inspect}" + end - ary - elsif container.intersect?(CONTAINER_INDEX_ID_TYPE) && value.is_a?(Hash) - # Otherwise, if key's container mapping in active context contains @index, @id, @type and value is a JSON object then value is expanded from an index map as follows: - - # Set ary to an empty array. - ary = [] - index_key = context.term_definitions[key].index || '@index' - - # While processing index keys, if container includes @type, clear type-scoped term definitions - container_context = if container.include?('@type') && context.previous_context - context.previous_context - elsif container.include?('@id') && context.term_definitions[key] - id_context = context.term_definitions[key].context if context.term_definitions[key] - if id_context.nil? - context - else - # log_debug("expand", depth: log_depth.to_i) {"id_context: #{id_context.inspect}"} - context.parse(id_context, base: @options[:base], propagate: false) - end - else - context - end + [value[k]].flatten.each do |item| + # item must be a string, otherwise an invalid language map value error has been detected and processing is aborted. + unless item.nil? 
|| item.is_a?(String) + raise JsonLdError::InvalidLanguageMapValue, + "Expected #{item.inspect} to be a string" + end - # For each key-value in the object: - keys = @options[:ordered] ? value.keys.sort : value.keys - keys.each do |k| - # If container mapping in the active context includes @type, and k is a term in the active context having a local context, use that context when expanding values - map_context = container_context.term_definitions[k].context if container.include?('@type') && container_context.term_definitions[k] - unless map_context.nil? - # log_debug("expand", depth: log_depth.to_i) {"map_context: #{map_context.inspect}"} - map_context = container_context.parse(map_context, base: @options[:base], - propagate: false) + # Append a JSON object to expanded value that consists of two key-value pairs: (@value-item) and (@language-lowercased language). + v = { '@value' => item } + v['@language'] = (@options[:lowercaseLanguage] ? k.downcase : k) unless expanded_k == '@none' + v['@direction'] = context.direction(key) if context.direction(key) + ary << v if item + end end - map_context ||= container_context - expanded_k = container_context.expand_iri(k, vocab: true, as_string: true, base: @options[:base]) + ary + elsif container.intersect?(CONTAINER_INDEX_ID_TYPE) && value.is_a?(Hash) + # Otherwise, if key's container mapping in active context contains @index, @id, @type and value is a JSON object then value is expanded from an index map as follows: + + # Set ary to an empty array. + ary = [] + index_key = context.term_definitions[key].index || '@index' + + # While processing index keys, if container includes @type, clear type-scoped term definitions + container_context = if container.include?('@type') && context.previous_context + context.previous_context + elsif container.include?('@id') && context.term_definitions[key] + id_context = context.term_definitions[key].context if context.term_definitions[key] + if id_context.nil? 
+ context + else + # log_debug("expand", depth: log_depth.to_i) {"id_context: #{id_context.inspect}"} + context.parse(id_context, base: @options[:base], propagate: false) + end + else + context + end - # Initialize index value to the result of using this algorithm recursively, passing active context, key as active property, and index value as element. - index_value = expand([value[k]].flatten, key, map_context, - framing: framing, - from_map: true, - log_depth: log_depth.to_i + 1) - index_value.each do |item| - case - when container.include?('@index') - # Indexed graph by graph name - if !graph?(item) && container.include?('@graph') - item = {'@graph' => as_array(item)} - end - if index_key == '@index' - item['@index'] ||= k unless expanded_k == '@none' - elsif value?(item) - raise JsonLdError::InvalidValueObject, "Attempt to add illegal key to value object: #{index_key}" - else - # Expand key based on term - expanded_k = k == '@none' ? '@none' : container_context.expand_value(index_key, k, base: @options[:base]) - index_property = container_context.expand_iri(index_key, vocab: true, as_string: true, base: @options[:base]) - item[index_property] = [expanded_k].concat(Array(item[index_property])) unless expanded_k == '@none' - end - when container.include?('@id') - # Indexed graph by graph name - if !graph?(item) && container.include?('@graph') - item = {'@graph' => as_array(item)} - end - # Expand k document relative - expanded_k = container_context.expand_iri(k, as_string: true, base: @options[:base], documentRelative: true) unless expanded_k == '@none' - item['@id'] ||= expanded_k unless expanded_k == '@none' - when container.include?('@type') - item['@type'] = [expanded_k].concat(Array(item['@type'])) unless expanded_k == '@none' + # For each key-value in the object: + keys = @options[:ordered] ? 
value.keys.sort : value.keys + keys.each do |k| + # If container mapping in the active context includes @type, and k is a term in the active context having a local context, use that context when expanding values + if container.include?('@type') && container_context.term_definitions[k] + map_context = container_context.term_definitions[k].context + end + unless map_context.nil? + # log_debug("expand", depth: log_depth.to_i) {"map_context: #{map_context.inspect}"} + map_context = container_context.parse(map_context, base: @options[:base], + propagate: false) end + map_context ||= container_context + + expanded_k = container_context.expand_iri(k, vocab: true, as_string: true, base: @options[:base]) + + # Initialize index value to the result of using this algorithm recursively, passing active context, key as active property, and index value as element. + index_value = expand([value[k]].flatten, key, map_context, + framing: framing, + from_map: true, + log_depth: log_depth.to_i + 1) + index_value.each do |item| + if container.include?('@index') + # Indexed graph by graph name + item = { '@graph' => as_array(item) } if !graph?(item) && container.include?('@graph') + if index_key == '@index' + item['@index'] ||= k unless expanded_k == '@none' + elsif value?(item) + raise JsonLdError::InvalidValueObject, "Attempt to add illegal key to value object: #{index_key}" + else + # Expand key based on term + expanded_k = if k == '@none' + '@none' + else + container_context.expand_value(index_key, k, + base: @options[:base]) + end + index_property = container_context.expand_iri(index_key, vocab: true, as_string: true, + base: @options[:base]) + item[index_property] = [expanded_k].concat(Array(item[index_property])) unless expanded_k == '@none' + end + elsif container.include?('@id') + # Indexed graph by graph name + item = { '@graph' => as_array(item) } if !graph?(item) && container.include?('@graph') + # Expand k document relative + unless expanded_k == '@none' + expanded_k = 
container_context.expand_iri(k, as_string: true, base: @options[:base], + documentRelative: true) + end + item['@id'] ||= expanded_k unless expanded_k == '@none' + elsif container.include?('@type') + item['@type'] = [expanded_k].concat(Array(item['@type'])) unless expanded_k == '@none' + end - # Append item to expanded value. - ary << item + # Append item to expanded value. + ary << item + end end + ary + else + # Otherwise, initialize expanded value to the result of using this algorithm recursively, passing active context, key for active property, and value for element. + expand(value, key, context, + framing: framing, + log_depth: log_depth.to_i + 1) end - ary - else - # Otherwise, initialize expanded value to the result of using this algorithm recursively, passing active context, key for active property, and value for element. - expand(value, key, context, - framing: framing, - log_depth: log_depth.to_i + 1) - end - # If expanded value is null, ignore key by continuing to the next key from element. - if expanded_value.nil? - # log_debug(" => skip nil value", depth: log_depth.to_i) - next - end - # log_debug(depth: log_depth.to_i) {" => #{expanded_value.inspect}"} + # If expanded value is null, ignore key by continuing to the next key from element. + if expanded_value.nil? + # log_debug(" => skip nil value", depth: log_depth.to_i) + next + end - # If the container mapping associated to key in active context is @list and expanded value is not already a list object, convert expanded value to a list object by first setting it to an array containing only expanded value if it is not already an array, and then by setting it to a JSON object containing the key-value pair @list-expanded value. 
- if container.first == '@list' && container.length == 1 && !list?(expanded_value) - # log_debug(" => ", depth: log_depth.to_i) { "convert #{expanded_value.inspect} to list"} - expanded_value = {'@list' => as_array(expanded_value)} - end - # log_debug(depth: log_depth.to_i) {" => #{expanded_value.inspect}"} + # log_debug(depth: log_depth.to_i) {" => #{expanded_value.inspect}"} - # convert expanded value to @graph if container specifies it - if container.first == '@graph' && container.length == 1 - # log_debug(" => ", depth: log_depth.to_i) { "convert #{expanded_value.inspect} to list"} - expanded_value = as_array(expanded_value).map do |v| - {'@graph' => as_array(v)} + # If the container mapping associated to key in active context is @list and expanded value is not already a list object, convert expanded value to a list object by first setting it to an array containing only expanded value if it is not already an array, and then by setting it to a JSON object containing the key-value pair @list-expanded value. + if container.first == '@list' && container.length == 1 && !list?(expanded_value) + # log_debug(" => ", depth: log_depth.to_i) { "convert #{expanded_value.inspect} to list"} + expanded_value = { '@list' => as_array(expanded_value) } end - end + # log_debug(depth: log_depth.to_i) {" => #{expanded_value.inspect}"} - # Otherwise, if the term definition associated to key indicates that it is a reverse property - # Spec FIXME: this is not an otherwise. - if (td = context.term_definitions[key]) && td.reverse_property - # If result has no @reverse member, create one and initialize its value to an empty JSON object. - reverse_map = output_object['@reverse'] ||= {} - [expanded_value].flatten.each do |item| - # If item is a value object or list object, an invalid reverse property value has been detected and processing is aborted. 
- raise JsonLdError::InvalidReversePropertyValue, - item.inspect if value?(item) || list?(item) - - # If reverse map has no expanded property member, create one and initialize its value to an empty array. - # Append item to the value of the expanded property member of reverse map. - merge_value(reverse_map, expanded_property, item) + # convert expanded value to @graph if container specifies it + if container.first == '@graph' && container.length == 1 + # log_debug(" => ", depth: log_depth.to_i) { "convert #{expanded_value.inspect} to list"} + expanded_value = as_array(expanded_value).map do |v| + { '@graph' => as_array(v) } + end end - else - # Otherwise, if key is not a reverse property: - # If result does not have an expanded property member, create one and initialize its value to an empty array. - (output_object[expanded_property] ||= []).tap do |memo| - # expanded_value is either Array[Hash] or Hash; in both case append to memo without flatten - if expanded_value.is_a?(Array) - memo.concat(expanded_value) - else # Hash - memo << expanded_value + + # Otherwise, if the term definition associated to key indicates that it is a reverse property + # Spec FIXME: this is not an otherwise. + if (td = context.term_definitions[key]) && td.reverse_property + # If result has no @reverse member, create one and initialize its value to an empty JSON object. + reverse_map = output_object['@reverse'] ||= {} + [expanded_value].flatten.each do |item| + # If item is a value object or list object, an invalid reverse property value has been detected and processing is aborted. + if value?(item) || list?(item) + raise JsonLdError::InvalidReversePropertyValue, + item.inspect + end + + # If reverse map has no expanded property member, create one and initialize its value to an empty array. + # Append item to the value of the expanded property member of reverse map. 
+ merge_value(reverse_map, expanded_property, item) + end + else + # Otherwise, if key is not a reverse property: + # If result does not have an expanded property member, create one and initialize its value to an empty array. + (output_object[expanded_property] ||= []).tap do |memo| + # expanded_value is either Array[Hash] or Hash; in both case append to memo without flatten + if expanded_value.is_a?(Array) + memo.concat(expanded_value) + else # Hash + memo << expanded_value + end end end end - end - # For each key in nests, recusively expand content - nests.each do |key| - nest_context = context.term_definitions[key].context if context.term_definitions[key] - nest_context = if nest_context.nil? - context - else - # log_debug("expand", depth: log_depth.to_i) {"nest_context: #{nest_context.inspect}"} - context.parse(nest_context, base: @options[:base], - override_protected: true) - end - nested_values = as_array(input[key]) - nested_values.each do |nv| - raise JsonLdError::InvalidNestValue, nv.inspect unless - nv.is_a?(Hash) && nv.keys.none? {|k| nest_context.expand_iri(k, vocab: true, base: @options[:base]) == '@value'} - expand_object(nv, active_property, nest_context, output_object, - framing: framing, - expanded_active_property: expanded_active_property, - type_key: type_key, - type_scoped_context: type_scoped_context, - log_depth: log_depth.to_i + 1) + # For each key in nests, recusively expand content + nests.each do |key| + nest_context = context.term_definitions[key].context if context.term_definitions[key] + nest_context = if nest_context.nil? + context + else + # log_debug("expand", depth: log_depth.to_i) {"nest_context: #{nest_context.inspect}"} + context.parse(nest_context, base: @options[:base], + override_protected: true) + end + nested_values = as_array(input[key]) + nested_values.each do |nv| + raise JsonLdError::InvalidNestValue, nv.inspect unless + nv.is_a?(Hash) && nv.keys.none? 
do |k| + nest_context.expand_iri(k, vocab: true, base: @options[:base]) == '@value' + end + + expand_object(nv, active_property, nest_context, output_object, + framing: framing, + expanded_active_property: expanded_active_property, + type_key: type_key, + type_scoped_context: type_scoped_context, + log_depth: log_depth.to_i + 1) + end end end end diff --git a/lib/json/ld/extensions.rb b/lib/json/ld/extensions.rb index e24f46cc..0bca501f 100644 --- a/lib/json/ld/extensions.rb +++ b/lib/json/ld/extensions.rb @@ -1,42 +1,43 @@ -# -*- encoding: utf-8 -*- # frozen_string_literal: true + module RDF class Node # Odd case of appending to a BNode identifier - def +(value) - Node.new(id + value.to_s) + def +(other) + Node.new(id + other.to_s) end end class Statement # Validate extended RDF def valid_extended? - subject? && subject.resource? && subject.valid_extended? && - predicate? && predicate.resource? && predicate.valid_extended? && - object? && object.term? && object.valid_extended? && - (graph? ? (graph_name.resource? && graph_name.valid_extended?) : true) + subject? && subject.resource? && subject.valid_extended? && + predicate? && predicate.resource? && predicate.valid_extended? && + object? && object.term? && object.valid_extended? && + (graph? ? (graph_name.resource? && graph_name.valid_extended?) : true) end end - class URI + class URI # Validate extended RDF def valid_extended? - self.valid? + valid? end end - class Node + class Node # Validate extended RDF def valid_extended? - self.valid? + valid? end end - class Literal + class Literal # Validate extended RDF def valid_extended? return false if language? && language.to_s !~ /^[a-zA-Z]+(-[a-zA-Z0-9]+)*$/ return false if datatype? && datatype.invalid? + value.is_a?(String) end end @@ -48,6 +49,6 @@ class Array # @param [Boolean] ordered # @return [Array] def opt_sort(ordered: false) - ordered ? self.sort : self + ordered ? 
sort : self end end diff --git a/lib/json/ld/flatten.rb b/lib/json/ld/flatten.rb index 5a348c45..0eeb40f9 100644 --- a/lib/json/ld/flatten.rb +++ b/lib/json/ld/flatten.rb @@ -1,289 +1,299 @@ -# -*- encoding: utf-8 -*- # frozen_string_literal: true + require 'json/canonicalization' -module JSON::LD - module Flatten - include Utils - - ## - # This algorithm creates a JSON object node map holding an indexed representation of the graphs and nodes represented in the passed expanded document. All nodes that are not uniquely identified by an IRI get assigned a (new) blank node identifier. The resulting node map will have a member for every graph in the document whose value is another object with a member for every node represented in the document. The default graph is stored under the @default member, all other graphs are stored under their graph name. - # - # For RDF-star/JSON-LD-star: - # * Values of `@id` can be an object (embedded node); when these are used as keys in a Node Map, they are serialized as canonical JSON, and de-serialized when flattening. - # * The presence of `@annotation` implies an embedded node and the annotation object is removed from the node/value object in which it appears. - # - # @param [Array, Hash] element - # Expanded JSON-LD input - # @param [Hash] graph_map A map of graph name to subjects - # @param [String] active_graph - # The name of the currently active graph that the processor should use when processing. 
- # @param [String] active_subject (nil) - # Node identifier - # @param [String] active_property (nil) - # Property within current node - # @param [Boolean] reverse (false) - # Processing a reverse relationship - # @param [Array] list (nil) - # Used when property value is a list - def create_node_map(element, graph_map, - active_graph: '@default', - active_subject: nil, - active_property: nil, - reverse: false, - list: nil) - if element.is_a?(Array) - # If element is an array, process each entry in element recursively by passing item for element, node map, active graph, active subject, active property, and list. - element.map do |o| - create_node_map(o, graph_map, - active_graph: active_graph, - active_subject: active_subject, - active_property: active_property, +module JSON + module LD + module Flatten + include Utils + + ## + # This algorithm creates a JSON object node map holding an indexed representation of the graphs and nodes represented in the passed expanded document. All nodes that are not uniquely identified by an IRI get assigned a (new) blank node identifier. The resulting node map will have a member for every graph in the document whose value is another object with a member for every node represented in the document. The default graph is stored under the @default member, all other graphs are stored under their graph name. + # + # For RDF-star/JSON-LD-star: + # * Values of `@id` can be an object (embedded node); when these are used as keys in a Node Map, they are serialized as canonical JSON, and de-serialized when flattening. + # * The presence of `@annotation` implies an embedded node and the annotation object is removed from the node/value object in which it appears. + # + # @param [Array, Hash] element + # Expanded JSON-LD input + # @param [Hash] graph_map A map of graph name to subjects + # @param [String] active_graph + # The name of the currently active graph that the processor should use when processing. 
+ # @param [String] active_subject (nil) + # Node identifier + # @param [String] active_property (nil) + # Property within current node + # @param [Boolean] reverse (false) + # Processing a reverse relationship + # @param [Array] list (nil) + # Used when property value is a list + def create_node_map(element, graph_map, + active_graph: '@default', + active_subject: nil, + active_property: nil, reverse: false, - list: list) - end - elsif !element.is_a?(Hash) - raise "Expected hash or array to create_node_map, got #{element.inspect}" - else - graph = (graph_map[active_graph] ||= {}) - subject_node = !reverse && graph[active_subject.is_a?(Hash) ? active_subject.to_json_c14n : active_subject] - - # Transform BNode types - if element.key?('@type') - element['@type'] = Array(element['@type']).map {|t| blank_node?(t) ? namer.get_name(t) : t} - end - - if value?(element) - element['@type'] = element['@type'].first if element ['@type'] - - # For rdfstar, if value contains an `@annotation` member ... - # note: active_subject will not be nil, and may be an object itself. - if element.key?('@annotation') - # rdfstar being true is implicit, as it is checked in expansion - as = node_reference?(active_subject) ? - active_subject['@id'] : - active_subject - star_subject = { - "@id" => as, - active_property => [element] - } - - # Note that annotation is an array, make the reified subject the id of each member of that array. - annotation = element.delete('@annotation').map do |a| - a.merge('@id' => star_subject) - end - - # Invoke recursively using annotation. - create_node_map(annotation, graph_map, - active_graph: active_graph) - end - - if list.nil? 
- add_value(subject_node, active_property, element, property_is_array: true, allow_duplicate: false) - else - list['@list'] << element - end - elsif list?(element) - result = {'@list' => []} - create_node_map(element['@list'], graph_map, - active_graph: active_graph, - active_subject: active_subject, - active_property: active_property, - list: result) - if list.nil? - add_value(subject_node, active_property, result, property_is_array: true) - else - list['@list'] << result + list: nil) + if element.is_a?(Array) + # If element is an array, process each entry in element recursively by passing item for element, node map, active graph, active subject, active property, and list. + element.map do |o| + create_node_map(o, graph_map, + active_graph: active_graph, + active_subject: active_subject, + active_property: active_property, + reverse: false, + list: list) end + elsif !element.is_a?(Hash) + raise "Expected hash or array to create_node_map, got #{element.inspect}" else - # Element is a node object - ser_id = id = element.delete('@id') - if id.is_a?(Hash) - # Index graph using serialized id - ser_id = id.to_json_c14n - elsif id.nil? - ser_id = id = namer.get_name + graph = (graph_map[active_graph] ||= {}) + subject_node = !reverse && graph[active_subject.is_a?(Hash) ? active_subject.to_json_c14n : active_subject] + + # Transform BNode types + if element.key?('@type') + element['@type'] = Array(element['@type']).map { |t| blank_node?(t) ? namer.get_name(t) : t } end - node = graph[ser_id] ||= {'@id' => id} + if value?(element) + element['@type'] = element['@type'].first if element['@type'] + + # For rdfstar, if value contains an `@annotation` member ... + # note: active_subject will not be nil, and may be an object itself. 
+ if element.key?('@annotation') + # rdfstar being true is implicit, as it is checked in expansion + as = if node_reference?(active_subject) + active_subject['@id'] + else + active_subject + end + star_subject = { + "@id" => as, + active_property => [element] + } + + # Note that annotation is an array, make the reified subject the id of each member of that array. + annotation = element.delete('@annotation').map do |a| + a.merge('@id' => star_subject) + end + + # Invoke recursively using annotation. + create_node_map(annotation, graph_map, + active_graph: active_graph) + end - if reverse - # Note: active_subject is a Hash - # We're processing a reverse-property relationship. - add_value(node, active_property, active_subject, property_is_array: true, allow_duplicate: false) - elsif active_property - reference = {'@id' => id} if list.nil? - add_value(subject_node, active_property, reference, property_is_array: true, allow_duplicate: false) + add_value(subject_node, active_property, element, property_is_array: true, allow_duplicate: false) else - list['@list'] << reference + list['@list'] << element + end + elsif list?(element) + result = { '@list' => [] } + create_node_map(element['@list'], graph_map, + active_graph: active_graph, + active_subject: active_subject, + active_property: active_property, + list: result) + if list.nil? + add_value(subject_node, active_property, result, property_is_array: true) + else + list['@list'] << result + end + else + # Element is a node object + ser_id = id = element.delete('@id') + if id.is_a?(Hash) + # Index graph using serialized id + ser_id = id.to_json_c14n + elsif id.nil? + ser_id = id = namer.get_name end - end - # For rdfstar, if node contains an `@annotation` member ... - # note: active_subject will not be nil, and may be an object itself. - # XXX: what if we're reversing an annotation? - if element.key?('@annotation') - # rdfstar being true is implicit, as it is checked in expansion - as = node_reference?(active_subject) ? 
- active_subject['@id'] : - active_subject - star_subject = reverse ? - {"@id" => node['@id'], active_property => [{'@id' => as}]} : - {"@id" => as, active_property => [{'@id' => node['@id']}]} - - # Note that annotation is an array, make the reified subject the id of each member of that array. - annotation = element.delete('@annotation').map do |a| - a.merge('@id' => star_subject) + node = graph[ser_id] ||= { '@id' => id } + + if reverse + # NOTE: active_subject is a Hash + # We're processing a reverse-property relationship. + add_value(node, active_property, active_subject, property_is_array: true, allow_duplicate: false) + elsif active_property + reference = { '@id' => id } + if list.nil? + add_value(subject_node, active_property, reference, property_is_array: true, allow_duplicate: false) + else + list['@list'] << reference + end end - # Invoke recursively using annotation. - create_node_map(annotation, graph_map, - active_graph: active_graph, - active_subject: star_subject) - end + # For rdfstar, if node contains an `@annotation` member ... + # note: active_subject will not be nil, and may be an object itself. + # XXX: what if we're reversing an annotation? + if element.key?('@annotation') + # rdfstar being true is implicit, as it is checked in expansion + as = if node_reference?(active_subject) + active_subject['@id'] + else + active_subject + end + star_subject = if reverse + { "@id" => node['@id'], active_property => [{ '@id' => as }] } + else + { "@id" => as, active_property => [{ '@id' => node['@id'] }] } + end - if element.key?('@type') - add_value(node, '@type', element.delete('@type'), property_is_array: true, allow_duplicate: false) - end + # Note that annotation is an array, make the reified subject the id of each member of that array. 
+ annotation = element.delete('@annotation').map do |a| + a.merge('@id' => star_subject) + end - if element['@index'] - raise JsonLdError::ConflictingIndexes, - "Element already has index #{node['@index']} dfferent from #{element['@index']}" if - node.key?('@index') && node['@index'] != element['@index'] - node['@index'] = element.delete('@index') - end + # Invoke recursively using annotation. + create_node_map(annotation, graph_map, + active_graph: active_graph, + active_subject: star_subject) + end - if element['@reverse'] - referenced_node, reverse_map = {'@id' => id}, element.delete('@reverse') - reverse_map.each do |property, values| - values.each do |value| - create_node_map(value, graph_map, - active_graph: active_graph, - active_subject: referenced_node, - active_property: property, - reverse: true) + if element.key?('@type') + add_value(node, '@type', element.delete('@type'), property_is_array: true, allow_duplicate: false) + end + + if element['@index'] + if node.key?('@index') && node['@index'] != element['@index'] + raise JsonLdError::ConflictingIndexes, + "Element already has index #{node['@index']} dfferent from #{element['@index']}" end + node['@index'] = element.delete('@index') end - end - if element['@graph'] - create_node_map(element.delete('@graph'), graph_map, - active_graph: id) - end + if element['@reverse'] + referenced_node = { '@id' => id } + reverse_map = element.delete('@reverse') + reverse_map.each do |property, values| + values.each do |value| + create_node_map(value, graph_map, + active_graph: active_graph, + active_subject: referenced_node, + active_property: property, + reverse: true) + end + end + end - if element['@included'] - create_node_map(element.delete('@included'), graph_map, - active_graph: active_graph) - end + if element['@graph'] + create_node_map(element.delete('@graph'), graph_map, + active_graph: id) + end + + if element['@included'] + create_node_map(element.delete('@included'), graph_map, + active_graph: 
active_graph) + end - element.keys.each do |property| - value = element[property] + element.each_key do |property| + value = element[property] - property = namer.get_name(property) if blank_node?(property) - node[property] ||= [] - create_node_map(value, graph_map, - active_graph: active_graph, - active_subject: id, - active_property: property) + property = namer.get_name(property) if blank_node?(property) + node[property] ||= [] + create_node_map(value, graph_map, + active_graph: active_graph, + active_subject: id, + active_property: property) + end end end end - end - ## - # Create annotations - # - # Updates a node map from which annotations have been folded into embedded triples to re-extract the annotations. - # - # Map entries where the key is of the form of a canonicalized JSON object are used to find keys with the `@id` and property components. If found, the original map entry is removed and entries added to an `@annotation` property of the associated value. - # - # * Keys which are of the form of a canonicalized JSON object are examined in inverse order of length. - # * Deserialize the key into a map, and re-serialize the value of `@id`. - # * If the map contains an entry with that value (after re-canonicalizing, as appropriate), and the associated antry has a item which matches the non-`@id` item from the map, the node is used to create an `@annotation` entry within that value. - # - # @param [Hash{String => Hash}] node_map - # @return [Hash{String => Hash}] - def create_annotations(node_map) - node_map.keys. - select {|k| k.start_with?('{')}. - sort_by(&:length). - reverse.each do |key| - - annotation = node_map[key] - # Deserialize key, and re-serialize the `@id` value. - emb = annotation['@id'].dup - id = emb.delete('@id') - property, value = emb.to_a.first - - # If id is a map, set it to the result of canonicalizing that value, otherwise to itself. 
- id = id.to_json_c14n if id.is_a?(Hash) - - next unless node_map.key?(id) - # If node map has an entry for id and that entry contains the same property and value from entry: - node = node_map[id] - - next unless node.key?(property) - - node[property].each do |emb_value| - next unless emb_value == value.first - - node_map.delete(key) - annotation.delete('@id') - add_value(emb_value, '@annotation', annotation, property_is_array: true) unless - annotation.empty? + ## + # Create annotations + # + # Updates a node map from which annotations have been folded into embedded triples to re-extract the annotations. + # + # Map entries where the key is of the form of a canonicalized JSON object are used to find keys with the `@id` and property components. If found, the original map entry is removed and entries added to an `@annotation` property of the associated value. + # + # * Keys which are of the form of a canonicalized JSON object are examined in inverse order of length. + # * Deserialize the key into a map, and re-serialize the value of `@id`. + # * If the map contains an entry with that value (after re-canonicalizing, as appropriate), and the associated antry has a item which matches the non-`@id` item from the map, the node is used to create an `@annotation` entry within that value. + # + # @param [Hash{String => Hash}] node_map + # @return [Hash{String => Hash}] + def create_annotations(node_map) + node_map.keys + .select { |k| k.start_with?('{') } + .sort_by(&:length) + .reverse_each do |key| + annotation = node_map[key] + # Deserialize key, and re-serialize the `@id` value. + emb = annotation['@id'].dup + id = emb.delete('@id') + property, value = emb.to_a.first + + # If id is a map, set it to the result of canonicalizing that value, otherwise to itself. 
+ id = id.to_json_c14n if id.is_a?(Hash) + + next unless node_map.key?(id) + + # If node map has an entry for id and that entry contains the same property and value from entry: + node = node_map[id] + + next unless node.key?(property) + + node[property].each do |emb_value| + next unless emb_value == value.first + + node_map.delete(key) + annotation.delete('@id') + add_value(emb_value, '@annotation', annotation, property_is_array: true) unless + annotation.empty? + end end end - end - ## - # Rename blank nodes recursively within an embedded object - # - # @param [Object] node - # @return [Hash] - def rename_bnodes(node) - case node - when Array - node.map {|n| rename_bnodes(n)} - when Hash - node.inject({}) do |memo, (k, v)| - v = namer.get_name(v) if k == '@id' && v.is_a?(String) && blank_node?(v) - memo.merge(k => rename_bnodes(v)) + ## + # Rename blank nodes recursively within an embedded object + # + # @param [Object] node + # @return [Hash] + def rename_bnodes(node) + case node + when Array + node.map { |n| rename_bnodes(n) } + when Hash + node.each_with_object({}) do |(k, v), memo| + v = namer.get_name(v) if k == '@id' && v.is_a?(String) && blank_node?(v) + memo[k] = rename_bnodes(v) + end + else + node end - else - node end - end - private - - ## - # Merge nodes from all graphs in the graph_map into a new node map - # - # @param [Hash{String => Hash}] graph_map - # @return [Hash] - def merge_node_map_graphs(graph_map) - merged = {} - graph_map.each do |name, node_map| - node_map.each do |id, node| - merged_node = (merged[id] ||= {'@id' => id}) - - # Iterate over node properties - node.each do |property, values| - if property != '@type' && property.start_with?('@') - # Copy keywords - merged_node[property] = node[property].dup - else - # Merge objects - values.each do |value| - add_value(merged_node, property, value.dup, property_is_array: true) + private + + ## + # Merge nodes from all graphs in the graph_map into a new node map + # + # @param [Hash{String => 
Hash}] graph_map + # @return [Hash] + def merge_node_map_graphs(graph_map) + merged = {} + graph_map.each do |_name, node_map| + node_map.each do |id, node| + merged_node = (merged[id] ||= { '@id' => id }) + + # Iterate over node properties + node.each do |property, values| + if property != '@type' && property.start_with?('@') + # Copy keywords + merged_node[property] = node[property].dup + else + # Merge objects + values.each do |value| + add_value(merged_node, property, value.dup, property_is_array: true) + end end end end end - end - merged + merged + end end end end diff --git a/lib/json/ld/format.rb b/lib/json/ld/format.rb index 5022cc51..4a44ed38 100644 --- a/lib/json/ld/format.rb +++ b/lib/json/ld/format.rb @@ -1,215 +1,223 @@ -# -*- encoding: utf-8 -*- # frozen_string_literal: true -module JSON::LD - ## - # JSON-LD format specification. - # - # @example Obtaining an JSON-LD format class - # RDF::Format.for(:jsonld) #=> JSON::LD::Format - # RDF::Format.for("etc/foaf.jsonld") - # RDF::Format.for(:file_name => "etc/foaf.jsonld") - # RDF::Format.for(file_extension: "jsonld") - # RDF::Format.for(:content_type => "application/ld+json") - # - # @example Obtaining serialization format MIME types - # RDF::Format.content_types #=> {"application/ld+json" => [JSON::LD::Format], - # "application/x-ld+json" => [JSON::LD::Format]} - # - # @example Obtaining serialization format file extension mappings - # RDF::Format.file_extensions #=> {:jsonld => [JSON::LD::Format] } - # - # @see https://www.w3.org/TR/json-ld11/ - # @see https://w3c.github.io/json-ld-api/tests/ - class Format < RDF::Format - content_type 'application/ld+json', - extension: :jsonld, - alias: 'application/x-ld+json', - uri: 'http://www.w3.org/ns/formats/JSON-LD' - content_encoding 'utf-8' - - reader { JSON::LD::Reader } - writer { JSON::LD::Writer } +module JSON + module LD ## - # Sample detection to see if it matches JSON-LD + # JSON-LD format specification. 
# - # Use a text sample to detect the format of an input file. Sub-classes implement - # a matcher sufficient to detect probably format matches, including disambiguating - # between other similar formats. + # @example Obtaining an JSON-LD format class + # RDF::Format.for(:jsonld) #=> JSON::LD::Format + # RDF::Format.for("etc/foaf.jsonld") + # RDF::Format.for(:file_name => "etc/foaf.jsonld") + # RDF::Format.for(file_extension: "jsonld") + # RDF::Format.for(:content_type => "application/ld+json") # - # @param [String] sample Beginning several bytes (~ 1K) of input. - # @return [Boolean] - def self.detect(sample) - !!sample.match(/\{\s*"@(id|context|type)"/m) && - # Exclude CSVW metadata - !sample.include?("http://www.w3.org/ns/csvw") - end + # @example Obtaining serialization format MIME types + # RDF::Format.content_types #=> {"application/ld+json" => [JSON::LD::Format], + # "application/x-ld+json" => [JSON::LD::Format]} + # + # @example Obtaining serialization format file extension mappings + # RDF::Format.file_extensions #=> {:jsonld => [JSON::LD::Format] } + # + # @see https://www.w3.org/TR/json-ld11/ + # @see https://w3c.github.io/json-ld-api/tests/ + class Format < RDF::Format + content_type 'application/ld+json', + extension: :jsonld, + alias: 'application/x-ld+json', + uri: 'http://www.w3.org/ns/formats/JSON-LD' + content_encoding 'utf-8' + + reader { JSON::LD::Reader } + writer { JSON::LD::Writer } + + ## + # Sample detection to see if it matches JSON-LD + # + # Use a text sample to detect the format of an input file. Sub-classes implement + # a matcher sufficient to detect probably format matches, including disambiguating + # between other similar formats. + # + # @param [String] sample Beginning several bytes (~ 1K) of input. 
+ # @return [Boolean] + def self.detect(sample) + !!sample.match(/\{\s*"@(id|context|type)"/m) && + # Exclude CSVW metadata + !sample.include?("http://www.w3.org/ns/csvw") + end - # Specify how to execute CLI commands for each supported format. - # Derived formats (e.g., YAML-LD) define their own entrypoints. - LD_FORMATS = { - jsonld: { - expand: ->(input, **options) { - JSON::LD::API.expand(input, - serializer: JSON::LD::API.method(:serializer), - **options) - }, - compact: ->(input, **options) { - JSON::LD::API.compact(input, - options[:context], - serializer: JSON::LD::API.method(:serializer), - **options) - }, - flatten: ->(input, **options) { - JSON::LD::API.flatten(input, - options[:context], - serializer: JSON::LD::API.method(:serializer), - **options) - }, - frame: ->(input, **options) { - JSON::LD::API.frame(input, - options[:frame], - serializer: JSON::LD::API.method(:serializer), - **options) - }, + # Specify how to execute CLI commands for each supported format. + # Derived formats (e.g., YAML-LD) define their own entrypoints. + LD_FORMATS = { + jsonld: { + expand: lambda { |input, **options| + JSON::LD::API.expand(input, + serializer: JSON::LD::API.method(:serializer), + **options) + }, + compact: lambda { |input, **options| + JSON::LD::API.compact(input, + options[:context], + serializer: JSON::LD::API.method(:serializer), + **options) + }, + flatten: lambda { |input, **options| + JSON::LD::API.flatten(input, + options[:context], + serializer: JSON::LD::API.method(:serializer), + **options) + }, + frame: lambda { |input, **options| + JSON::LD::API.frame(input, + options[:frame], + serializer: JSON::LD::API.method(:serializer), + **options) + } + } } - } - # Execute the body of a CLI command, generic for each different API method based on definitions on {LD_FORMATS}. 
- # - # Expands the input, or transforms from an RDF format based on the `:format` option, and then executes the appropriate command based on `:output_format` and does appropriate output serialization. - # @private - def self.cli_exec(command, files, output: $stdin, **options) - output.set_encoding(Encoding::UTF_8) if output.respond_to?(:set_encoding) && RUBY_PLATFORM == "java" - options[:base] ||= options[:base_uri] + # Execute the body of a CLI command, generic for each different API method based on definitions on {LD_FORMATS}. + # + # Expands the input, or transforms from an RDF format based on the `:format` option, and then executes the appropriate command based on `:output_format` and does appropriate output serialization. + # @private + def self.cli_exec(command, files, output: $stdin, **options) + output.set_encoding(Encoding::UTF_8) if output.respond_to?(:set_encoding) && RUBY_PLATFORM == "java" + options[:base] ||= options[:base_uri] - # Parse using input format, serialize using output format - in_fmt = LD_FORMATS[options.fetch(:format, :jsonld)] - out_fmt = LD_FORMATS[options.fetch(:output_format, :jsonld)] + # Parse using input format, serialize using output format + in_fmt = LD_FORMATS[options.fetch(:format, :jsonld)] + out_fmt = LD_FORMATS[options.fetch(:output_format, :jsonld)] - if in_fmt - # Input is a JSON-LD based source (or derived) - if files.empty? - # If files are empty, either use options[:evaluate] or STDIN - input = options[:evaluate] ? StringIO.new(options[:evaluate]) : STDIN - input.set_encoding(options.fetch(:encoding, Encoding::UTF_8)) - expanded = in_fmt[:expand].call(input, serializer: nil, **options) - output.puts out_fmt[command].call(expanded, expanded: true, **options) - else - files.each do |file| - expanded = in_fmt[:expand].call(file, serializer: nil, **options) + if in_fmt + # Input is a JSON-LD based source (or derived) + if files.empty? 
+ # If files are empty, either use options[:evaluate] or STDIN + input = options[:evaluate] ? StringIO.new(options[:evaluate]) : $stdin + input.set_encoding(options.fetch(:encoding, Encoding::UTF_8)) + expanded = in_fmt[:expand].call(input, serializer: nil, **options) output.puts out_fmt[command].call(expanded, expanded: true, **options) + else + files.each do |file| + expanded = in_fmt[:expand].call(file, serializer: nil, **options) + output.puts out_fmt[command].call(expanded, expanded: true, **options) + end end - end - else - # Turn RDF into JSON-LD first - RDF::CLI.parse(files, **options) do |reader| - JSON::LD::API.fromRdf(reader, serializer: nil, **options) do |expanded| - output.puts out_fmt[command].call(expanded, expanded: true, **options) + else + # Turn RDF into JSON-LD first + RDF::CLI.parse(files, **options) do |reader| + JSON::LD::API.fromRdf(reader, serializer: nil, **options) do |expanded| + output.puts out_fmt[command].call(expanded, expanded: true, **options) + end end end end - end - ## - # Hash of CLI commands appropriate for this format: - # - # * `expand` => {JSON::LD::API.expand} - # * `compact` => {JSON::LD::API.compact} - # * `flatten` => {JSON::LD::API.flatten} - # * `frame` => {JSON::LD::API.frame} - # - # @return [Hash{Symbol => Hash}] - def self.cli_commands - { - expand: { - description: "Expand JSON-LD or parsed RDF", - parse: false, - help: "expand [--context ] files ...", - filter: {output_format: LD_FORMATS.keys}, # Only shows output format set - lambda: ->(files, **options) do - options = options.merge(expandContext: options.delete(:context)) if options.key?(:context) - cli_exec(:expand, files, **options) - end, - option_use: {context: :removed} - }, - compact: { - description: "Compact JSON-LD or parsed RDF", - parse: false, - filter: {output_format: LD_FORMATS.keys}, # Only shows output format set - help: "compact --context files ...", - lambda: ->(files, **options) do - raise ArgumentError, "Compacting requires a context" 
unless options[:context] - cli_exec(:compact, files, **options) - end, - options: [ - RDF::CLI::Option.new( - symbol: :context, - datatype: RDF::URI, - control: :url2, - use: :required, - on: ["--context CONTEXT"], - description: "Context to use when compacting.") {|arg| RDF::URI(arg).absolute? ? RDF::URI(arg) : StringIO.new(File.read(arg))}, - ] - }, - flatten: { - description: "Flatten JSON-LD or parsed RDF", - parse: false, - help: "flatten [--context ] files ...", - filter: {output_format: LD_FORMATS.keys}, # Only shows output format set - lambda: ->(files, **options) do - cli_exec(:compact, files, **options) - end, - options: [ - RDF::CLI::Option.new( - symbol: :context, - datatype: RDF::URI, - control: :url2, - use: :required, - on: ["--context CONTEXT"], - description: "Context to use when compacting.") {|arg| RDF::URI(arg)}, - RDF::CLI::Option.new( - symbol: :createAnnotations, - datatype: TrueClass, - default: false, - control: :checkbox, - on: ["--[no-]create-annotations"], - description: "Unfold embedded nodes which can be represented using `@annotation`."), - ] - }, - frame: { - description: "Frame JSON-LD or parsed RDF", - parse: false, - help: "frame --frame files ...", - filter: {output_format: LD_FORMATS.keys}, # Only shows output format set - lambda: ->(files, **options) do - raise ArgumentError, "Framing requires a frame" unless options[:frame] - cli_exec(:compact, files, **options) - end, - option_use: {context: :removed}, - options: [ - RDF::CLI::Option.new( - symbol: :frame, - datatype: RDF::URI, - control: :url2, - use: :required, - on: ["--frame FRAME"], - description: "Frame to use when serializing.") {|arg| RDF::URI(arg).absolute? ? 
RDF::URI(arg) : StringIO.new(File.read(arg))} - ] - }, - } - end + ## + # Hash of CLI commands appropriate for this format: + # + # * `expand` => {JSON::LD::API.expand} + # * `compact` => {JSON::LD::API.compact} + # * `flatten` => {JSON::LD::API.flatten} + # * `frame` => {JSON::LD::API.frame} + # + # @return [Hash{Symbol => Hash}] + def self.cli_commands + { + expand: { + description: "Expand JSON-LD or parsed RDF", + parse: false, + help: "expand [--context ] files ...", + filter: { output_format: LD_FORMATS.keys }, # Only shows output format set + lambda: lambda do |files, **options| + options = options.merge(expandContext: options.delete(:context)) if options.key?(:context) + cli_exec(:expand, files, **options) + end, + option_use: { context: :removed } + }, + compact: { + description: "Compact JSON-LD or parsed RDF", + parse: false, + filter: { output_format: LD_FORMATS.keys }, # Only shows output format set + help: "compact --context files ...", + lambda: lambda do |files, **options| + raise ArgumentError, "Compacting requires a context" unless options[:context] - ## - # Override normal symbol generation - def self.to_sym - :jsonld - end + cli_exec(:compact, files, **options) + end, + options: [ + RDF::CLI::Option.new( + symbol: :context, + datatype: RDF::URI, + control: :url2, + use: :required, + on: ["--context CONTEXT"], + description: "Context to use when compacting." + ) { |arg| RDF::URI(arg).absolute? ? RDF::URI(arg) : StringIO.new(File.read(arg)) } + ] + }, + flatten: { + description: "Flatten JSON-LD or parsed RDF", + parse: false, + help: "flatten [--context ] files ...", + filter: { output_format: LD_FORMATS.keys }, # Only shows output format set + lambda: lambda do |files, **options| + cli_exec(:compact, files, **options) + end, + options: [ + RDF::CLI::Option.new( + symbol: :context, + datatype: RDF::URI, + control: :url2, + use: :required, + on: ["--context CONTEXT"], + description: "Context to use when compacting." 
+ ) { |arg| RDF::URI(arg) }, + RDF::CLI::Option.new( + symbol: :createAnnotations, + datatype: TrueClass, + default: false, + control: :checkbox, + on: ["--[no-]create-annotations"], + description: "Unfold embedded nodes which can be represented using `@annotation`." + ) + ] + }, + frame: { + description: "Frame JSON-LD or parsed RDF", + parse: false, + help: "frame --frame files ...", + filter: { output_format: LD_FORMATS.keys }, # Only shows output format set + lambda: lambda do |files, **options| + raise ArgumentError, "Framing requires a frame" unless options[:frame] - ## - # Override normal format name - def self.name - "JSON-LD" + cli_exec(:compact, files, **options) + end, + option_use: { context: :removed }, + options: [ + RDF::CLI::Option.new( + symbol: :frame, + datatype: RDF::URI, + control: :url2, + use: :required, + on: ["--frame FRAME"], + description: "Frame to use when serializing." + ) { |arg| RDF::URI(arg).absolute? ? RDF::URI(arg) : StringIO.new(File.read(arg)) } + ] + } + } + end + + ## + # Override normal symbol generation + def self.to_sym + :jsonld + end + + ## + # Override normal format name + def self.name + "JSON-LD" + end end end end diff --git a/lib/json/ld/frame.rb b/lib/json/ld/frame.rb index e9b2b943..1c42100f 100644 --- a/lib/json/ld/frame.rb +++ b/lib/json/ld/frame.rb @@ -1,224 +1,234 @@ -# -*- encoding: utf-8 -*- # frozen_string_literal: true + require 'set' -module JSON::LD - module Frame - include Utils - - ## - # Frame input. Input is expected in expanded form, but frame is in compacted form. - # - # @param [Hash{Symbol => Object}] state - # Current framing state - # @param [Array] subjects - # The subjects to filter - # @param [Hash{String => Object}] frame - # @param [String] property (nil) - # The parent property. 
- # @param [Hash{String => Object}] parent (nil) - # Parent subject or top-level array - # @param [Boolean] ordered (true) - # Ensure output objects have keys ordered properly - # @param [Hash{Symbol => Object}] options ({}) - # @raise [JSON::LD::InvalidFrame] - def frame(state, subjects, frame, parent: nil, property: nil, ordered: false, **options) - # Validate the frame - validate_frame(frame) - frame = frame.first if frame.is_a?(Array) - - # Get values for embedOn and explicitOn - flags = { - embed: get_frame_flag(frame, options, :embed), - explicit: get_frame_flag(frame, options, :explicit), - requireAll: get_frame_flag(frame, options, :requireAll), - } - - # Get link for current graph - link = state[:link][state[:graph]] ||= {} - - # Create a set of matched subjects by filtering subjects by checking the map of flattened subjects against frame - # This gives us a hash of objects indexed by @id - matches = filter_subjects(state, subjects, frame, flags) - - # For each id and node from the set of matched subjects ordered by id - matches.keys.opt_sort(ordered: ordered).each do |id| - subject = matches[id] - - # Note: In order to treat each top-level match as a compartmentalized result, clear the unique embedded subjects map when the property is nil, which only occurs at the top-level. - if property.nil? - state[:uniqueEmbeds] = {state[:graph] => {}} - else - state[:uniqueEmbeds][state[:graph]] ||= {} - end +module JSON + module LD + module Frame + include Utils + + ## + # Frame input. Input is expected in expanded form, but frame is in compacted form. + # + # @param [Hash{Symbol => Object}] state + # Current framing state + # @param [Array] subjects + # The subjects to filter + # @param [Hash{String => Object}] frame + # @param [String] property (nil) + # The parent property. 
+ # @param [Hash{String => Object}] parent (nil) + # Parent subject or top-level array + # @param [Boolean] ordered (true) + # Ensure output objects have keys ordered properly + # @param [Hash{Symbol => Object}] options ({}) + # @raise [JSON::LD::InvalidFrame] + def frame(state, subjects, frame, parent: nil, property: nil, ordered: false, **options) + # Validate the frame + validate_frame(frame) + frame = frame.first if frame.is_a?(Array) + + # Get values for embedOn and explicitOn + flags = { + embed: get_frame_flag(frame, options, :embed), + explicit: get_frame_flag(frame, options, :explicit), + requireAll: get_frame_flag(frame, options, :requireAll) + } - if flags[:embed] == '@link' && link.key?(id) - # add existing linked subject - add_frame_output(parent, property, link[id]) - next - end + # Get link for current graph + link = state[:link][state[:graph]] ||= {} - output = {'@id' => id} - link[id] = output + # Create a set of matched subjects by filtering subjects by checking the map of flattened subjects against frame + # This gives us a hash of objects indexed by @id + matches = filter_subjects(state, subjects, frame, flags) - if %w(@first @last).include?(flags[:embed]) && context.processingMode('json-ld-1.1') - raise JSON::LD::JsonLdError::InvalidEmbedValue, "#{flags[:embed]} is not a valid value of @embed in 1.1 mode" if @options[:validate] - warn "[DEPRECATION] #{flags[:embed]} is not a valid value of @embed in 1.1 mode.\n" - end + # For each id and node from the set of matched subjects ordered by id + matches.keys.opt_sort(ordered: ordered).each do |id| + subject = matches[id] - if !state[:embedded] && state[:uniqueEmbeds][state[:graph]].key?(id) - # Skip adding this node object to the top-level, as it was included in another node object - next - elsif state[:embedded] && - (flags[:embed] == '@never' || creates_circular_reference(subject, state[:graph], state[:subjectStack])) - # if embed is @never or if a circular reference would be created by an embed, 
the subject cannot be embedded, just add the reference; note that a circular reference won't occur when the embed flag is `@link` as the above check will short-circuit before reaching this point - add_frame_output(parent, property, output) - next - elsif state[:embedded] && - %w(@first @once).include?(flags[:embed]) && - state[:uniqueEmbeds][state[:graph]].key?(id) + # NOTE: In order to treat each top-level match as a compartmentalized result, clear the unique embedded subjects map when the property is nil, which only occurs at the top-level. + if property.nil? + state[:uniqueEmbeds] = { state[:graph] => {} } + else + state[:uniqueEmbeds][state[:graph]] ||= {} + end - # if only the first match should be embedded - # Embed unless already embedded - add_frame_output(parent, property, output) - next - elsif flags[:embed] == '@last' - # if only the last match should be embedded - # remove any existing embed - remove_embed(state, id) if state[:uniqueEmbeds][state[:graph]].include?(id) - end + if flags[:embed] == '@link' && link.key?(id) + # add existing linked subject + add_frame_output(parent, property, link[id]) + next + end - state[:uniqueEmbeds][state[:graph]][id] = { - parent: parent, - property: property - } + output = { '@id' => id } + link[id] = output - # push matching subject onto stack to enable circular embed checks - state[:subjectStack] << {subject: subject, graph: state[:graph]} - - # Subject is also the name of a graph - if state[:graphMap].key?(id) - # check frame's "@graph" to see what to do next - # 1. if it doesn't exist and state.graph === "@merged", don't recurse - # 2. if it doesn't exist and state.graph !== "@merged", recurse - # 3. if "@merged" then don't recurse - # 4. if "@default" then don't recurse - # 5. 
recurse - recurse, subframe = false, nil - if !frame.key?('@graph') - recurse, subframe = (state[:graph] != '@merged'), {} - else - subframe = frame['@graph'].first - recurse = !(id == '@merged' || id == '@default') - subframe = {} unless subframe.is_a?(Hash) + if %w[@first @last].include?(flags[:embed]) && context.processingMode('json-ld-1.1') + if @options[:validate] + raise JSON::LD::JsonLdError::InvalidEmbedValue, + "#{flags[:embed]} is not a valid value of @embed in 1.1 mode" + end + + warn "[DEPRECATION] #{flags[:embed]} is not a valid value of @embed in 1.1 mode.\n" end - if recurse - frame(state.merge(graph: id, embedded: false), state[:graphMap][id].keys, [subframe], parent: output, property: '@graph', **options) + if !state[:embedded] && state[:uniqueEmbeds][state[:graph]].key?(id) + # Skip adding this node object to the top-level, as it was included in another node object + next + elsif state[:embedded] && + (flags[:embed] == '@never' || creates_circular_reference(subject, state[:graph], state[:subjectStack])) + # if embed is @never or if a circular reference would be created by an embed, the subject cannot be embedded, just add the reference; note that a circular reference won't occur when the embed flag is `@link` as the above check will short-circuit before reaching this point + add_frame_output(parent, property, output) + next + elsif state[:embedded] && + %w[@first @once].include?(flags[:embed]) && + state[:uniqueEmbeds][state[:graph]].key?(id) + + # if only the first match should be embedded + # Embed unless already embedded + add_frame_output(parent, property, output) + next + elsif flags[:embed] == '@last' + # if only the last match should be embedded + # remove any existing embed + remove_embed(state, id) if state[:uniqueEmbeds][state[:graph]].include?(id) end - end - # If frame has `@included`, recurse over its sub-frame - if frame['@included'] - frame(state.merge(embedded: false), subjects, frame['@included'], parent: output, property: 
'@included', **options) - end + state[:uniqueEmbeds][state[:graph]][id] = { + parent: parent, + property: property + } + + # push matching subject onto stack to enable circular embed checks + state[:subjectStack] << { subject: subject, graph: state[:graph] } + + # Subject is also the name of a graph + if state[:graphMap].key?(id) + # check frame's "@graph" to see what to do next + # 1. if it doesn't exist and state.graph === "@merged", don't recurse + # 2. if it doesn't exist and state.graph !== "@merged", recurse + # 3. if "@merged" then don't recurse + # 4. if "@default" then don't recurse + # 5. recurse + recurse = false + subframe = nil + if frame.key?('@graph') + subframe = frame['@graph'].first + recurse = !['@merged', '@default'].include?(id) + subframe = {} unless subframe.is_a?(Hash) + else + recurse = (state[:graph] != '@merged') + subframe = {} + end - # iterate over subject properties in order - subject.keys.opt_sort(ordered: ordered).each do |prop| - objects = subject[prop] + if recurse + frame(state.merge(graph: id, embedded: false), state[:graphMap][id].keys, [subframe], parent: output, + property: '@graph', **options) + end + end - # copy keywords to output - if prop.start_with?('@') - output[prop] = objects.dup - next + # If frame has `@included`, recurse over its sub-frame + if frame['@included'] + frame(state.merge(embedded: false), subjects, frame['@included'], parent: output, property: '@included', + **options) end - # explicit is on and property isn't in frame, skip processing - next if flags[:explicit] && !frame.key?(prop) - - # add objects - objects.each do |o| - subframe = Array(frame[prop]).first || create_implicit_frame(flags) - - case - when list?(o) - subframe = frame[prop].first['@list'] if Array(frame[prop]).first.is_a?(Hash) - subframe ||= create_implicit_frame(flags) - # add empty list - list = {'@list' => []} - add_frame_output(output, prop, list) - - src = o['@list'] - src.each do |oo| - if node_reference?(oo) - 
frame(state.merge(embedded: true), [oo['@id']], subframe, parent: list, property: '@list', **options) - else - add_frame_output(list, '@list', oo.dup) + # iterate over subject properties in order + subject.keys.opt_sort(ordered: ordered).each do |prop| + objects = subject[prop] + + # copy keywords to output + if prop.start_with?('@') + output[prop] = objects.dup + next + end + + # explicit is on and property isn't in frame, skip processing + next if flags[:explicit] && !frame.key?(prop) + + # add objects + objects.each do |o| + subframe = Array(frame[prop]).first || create_implicit_frame(flags) + + if list?(o) + subframe = frame[prop].first['@list'] if Array(frame[prop]).first.is_a?(Hash) + subframe ||= create_implicit_frame(flags) + # add empty list + list = { '@list' => [] } + add_frame_output(output, prop, list) + + src = o['@list'] + src.each do |oo| + if node_reference?(oo) + frame(state.merge(embedded: true), [oo['@id']], subframe, parent: list, property: '@list', +**options) + else + add_frame_output(list, '@list', oo.dup) + end end + elsif node_reference?(o) + # recurse into subject reference + frame(state.merge(embedded: true), [o['@id']], subframe, parent: output, property: prop, **options) + elsif value_match?(subframe, o) + # Include values if they match + add_frame_output(output, prop, o.dup) end - when node_reference?(o) - # recurse into subject reference - frame(state.merge(embedded: true), [o['@id']], subframe, parent: output, property: prop, **options) - when value_match?(subframe, o) - # Include values if they match - add_frame_output(output, prop, o.dup) end end - end - # handle defaults in order - frame.keys.opt_sort(ordered: ordered).each do |prop| - if prop == '@type' && frame[prop].first.is_a?(Hash) && frame[prop].first.keys == %w(@default) - # Treat this as a default - elsif prop.start_with?('@') - next - end + # handle defaults in order + frame.keys.opt_sort(ordered: ordered).each do |prop| + if prop == '@type' && 
frame[prop].first.is_a?(Hash) && frame[prop].first.keys == %w[@default] + # Treat this as a default + elsif prop.start_with?('@') + next + end - # if omit default is off, then include default values for properties that appear in the next frame but are not in the matching subject - n = frame[prop].first || {} - omit_default_on = get_frame_flag(n, options, :omitDefault) - if !omit_default_on && !output[prop] - preserve = as_array(n.fetch('@default', '@null').dup) - output[prop] = [{'@preserve' => preserve}] + # if omit default is off, then include default values for properties that appear in the next frame but are not in the matching subject + n = frame[prop].first || {} + omit_default_on = get_frame_flag(n, options, :omitDefault) + if !omit_default_on && !output[prop] + preserve = as_array(n.fetch('@default', '@null').dup) + output[prop] = [{ '@preserve' => preserve }] + end end - end - # If frame has @reverse, embed identified nodes having this subject as a value of the associated property. - frame.fetch('@reverse', {}).each do |reverse_prop, subframe| - state[:subjects].each do |r_id, node| - if Array(node[reverse_prop]).any? {|v| v['@id'] == id} + # If frame has @reverse, embed identified nodes having this subject as a value of the associated property. + frame.fetch('@reverse', {}).each do |reverse_prop, subframe| + state[:subjects].each do |r_id, node| + next unless Array(node[reverse_prop]).any? 
{ |v| v['@id'] == id } + # Node has property referencing this subject # recurse into reference (output['@reverse'] ||= {})[reverse_prop] ||= [] - frame(state.merge(embedded: true), [r_id], subframe, parent: output['@reverse'][reverse_prop], property: property, **options) + frame(state.merge(embedded: true), [r_id], subframe, parent: output['@reverse'][reverse_prop], + property: property, **options) end end - end - # add output to parent - add_frame_output(parent, property, output) + # add output to parent + add_frame_output(parent, property, output) - # pop matching subject from circular ref-checking stack - state[:subjectStack].pop() + # pop matching subject from circular ref-checking stack + state[:subjectStack].pop + end + # end end - #end - end - ## - # Recursively find and count blankNode identifiers. - # @return [Hash{String => Integer}] - def count_blank_node_identifiers(input) - {}.tap do |results| - count_blank_node_identifiers_internal(input, results) + ## + # Recursively find and count blankNode identifiers. 
+ # @return [Hash{String => Integer}] + def count_blank_node_identifiers(input) + {}.tap do |results| + count_blank_node_identifiers_internal(input, results) + end end - end - def count_blank_node_identifiers_internal(input, results) - case input + def count_blank_node_identifiers_internal(input, results) + case input when Array - input.each {|o| count_blank_node_identifiers_internal(o, results)} + input.each { |o| count_blank_node_identifiers_internal(o, results) } when Hash - input.each do |k, v| + input.each do |_k, v| count_blank_node_identifiers_internal(v, results) end when String @@ -226,377 +236,390 @@ def count_blank_node_identifiers_internal(input, results) results[input] ||= 0 results[input] += 1 end + end end - end - ## - # Prune BNode identifiers recursively - # - # @param [Array, Hash] input - # @param [Array] bnodes_to_clear - # @return [Array, Hash] - def prune_bnodes(input, bnodes_to_clear) - result = case input - when Array - # If, after replacement, an array contains only the value null remove the value, leaving an empty array. - input.map {|o| prune_bnodes(o, bnodes_to_clear)}.compact - when Hash - output = Hash.new - input.each do |key, value| - if context.expand_iri(key) == '@id' && bnodes_to_clear.include?(value) - # Don't add this to output, as it is pruned as being superfluous - else - output[key] = prune_bnodes(value, bnodes_to_clear) + ## + # Prune BNode identifiers recursively + # + # @param [Array, Hash] input + # @param [Array] bnodes_to_clear + # @return [Array, Hash] + def prune_bnodes(input, bnodes_to_clear) + case input + when Array + # If, after replacement, an array contains only the value null remove the value, leaving an empty array. 
+ input.map { |o| prune_bnodes(o, bnodes_to_clear) }.compact + when Hash + output = {} + input.each do |key, value| + if context.expand_iri(key) == '@id' && bnodes_to_clear.include?(value) + # Don't add this to output, as it is pruned as being superfluous + else + output[key] = prune_bnodes(value, bnodes_to_clear) + end end + output + else + input end - output - else - input end - result - end - ## - # Replace @preserve keys with the values, also replace @null with null. - # - # @param [Array, Hash] input - # @return [Array, Hash] - def cleanup_preserve(input) - case input - when Array - # If, after replacement, an array contains only the value null remove the value, leaving an empty array. - input.map {|o| cleanup_preserve(o)} - when Hash - if input.key?('@preserve') - # Replace with the content of `@preserve` - cleanup_preserve(input['@preserve'].first) - else - input.inject({}) do |memo, (k,v)| - memo.merge(k => cleanup_preserve(v)) + ## + # Replace @preserve keys with the values, also replace @null with null. + # + # @param [Array, Hash] input + # @return [Array, Hash] + def cleanup_preserve(input) + case input + when Array + input.map! { |o| cleanup_preserve(o) } + when Hash + if input.key?('@preserve') + # Replace with the content of `@preserve` + cleanup_preserve(input['@preserve'].first) + else + input.transform_values do |v| + cleanup_preserve(v) + end end + else + input end - else - input end - end - ## - # Replace `@null` with `null`, removing it from arrays. - # - # @param [Array, Hash] input - # @return [Array, Hash] - def cleanup_null(input) - result = case input - when Array - # If, after replacement, an array contains only the value null remove the value, leaving an empty array. - input.map {|o| cleanup_null(o)}.compact - when Hash - input.inject({}) do |memo, (k,v)| - memo.merge(k => cleanup_null(v)) + ## + # Replace `@null` with `null`, removing it from arrays. 
+ # + # @param [Array, Hash] input + # @return [Array, Hash] + def cleanup_null(input) + case input + when Array + # If, after replacement, an array contains only the value null remove the value, leaving an empty array. + input.map! { |o| cleanup_null(o) }.compact + when Hash + input.transform_values do |v| + cleanup_null(v) + end + when '@null' + # If the value from the key-pair is @null, replace the value with null + nil + else + input end - when '@null' - # If the value from the key-pair is @null, replace the value with null - nil - else - input end - result - end - private - - ## - # Returns a map of all of the subjects that match a parsed frame. - # - # @param [Hash{Symbol => Object}] state - # Current framing state - # @param [Array] subjects - # The subjects to filter - # @param [Hash{String => Object}] frame - # @param [Hash{Symbol => String}] flags the frame flags. - # - # @return all of the matched subjects. - def filter_subjects(state, subjects, frame, flags) - subjects.each_with_object({}) do |id, memo| - subject = state[:graphMap][state[:graph]][id] - memo[id] = subject if filter_subject(subject, frame, state, flags) + private + + ## + # Returns a map of all of the subjects that match a parsed frame. + # + # @param [Hash{Symbol => Object}] state + # Current framing state + # @param [Array] subjects + # The subjects to filter + # @param [Hash{String => Object}] frame + # @param [Hash{Symbol => String}] flags the frame flags. + # + # @return all of the matched subjects. + def filter_subjects(state, subjects, frame, flags) + subjects.each_with_object({}) do |id, memo| + subject = state[:graphMap][state[:graph]][id] + memo[id] = subject if filter_subject(subject, frame, state, flags) + end end - end - ## - # Returns true if the given node matches the given frame. - # - # Matches either based on explicit type inclusion where the node has any type listed in the frame. If the frame has empty types defined matches nodes not having a @type. 
If the frame has a type of {} defined matches nodes having any type defined. - # - # Otherwise, does duck typing, where the node must have any or all of the properties defined in the frame, depending on the `requireAll` flag. - # - # @param [Hash{String => Object}] subject the subject to check. - # @param [Hash{String => Object}] frame the frame to check. - # @param [Hash{Symbol => Object}] state Current framing state - # @param [Hash{Symbol => Object}] flags the frame flags. - # - # @return [Boolean] true if the node matches, false if not. - def filter_subject(subject, frame, state, flags) - # Duck typing, for nodes not having a type, but having @id - wildcard, matches_some = true, false - - frame.each do |k, v| - node_values = subject.fetch(k, []) - - case k - when '@id' - ids = v || [] - - # Match on specific @id. - match_this = case ids - when [], [{}] - # Match on no @id or any @id - true - else - # Match on specific @id - ids.include?(subject['@id']) - end - return match_this if !flags[:requireAll] - when '@type' - # No longer a wildcard pattern - wildcard = false - - match_this = case v - when [] - # Don't match with any @type - return false if !node_values.empty? - true - when [{}] - # Match with any @type - !node_values.empty? - else - # Treat a map with @default like an empty map - if v.first.is_a?(Hash) && v.first.keys == %w(@default) + ## + # Returns true if the given node matches the given frame. + # + # Matches either based on explicit type inclusion where the node has any type listed in the frame. If the frame has empty types defined matches nodes not having a @type. If the frame has a type of {} defined matches nodes having any type defined. + # + # Otherwise, does duck typing, where the node must have any or all of the properties defined in the frame, depending on the `requireAll` flag. + # + # @param [Hash{String => Object}] subject the subject to check. + # @param [Hash{String => Object}] frame the frame to check. 
+ # @param [Hash{Symbol => Object}] state Current framing state + # @param [Hash{Symbol => Object}] flags the frame flags. + # + # @return [Boolean] true if the node matches, false if not. + def filter_subject(subject, frame, state, flags) + # Duck typing, for nodes not having a type, but having @id + wildcard = true + matches_some = false + + frame.each do |k, v| + node_values = subject.fetch(k, []) + + case k + when '@id' + ids = v || [] + + # Match on specific @id. + match_this = case ids + when [], [{}] + # Match on no @id or any @id true - elsif (v & node_values).empty? - # Match on specific @type - false else - true + # Match on specific @id + ids.include?(subject['@id']) end - end - return match_this if !flags[:requireAll] - when /@/ - # Skip other keywords - next - else - is_empty = v.empty? - if v = v.first - validate_frame(v) - has_default = v.key?('@default') - end + return match_this unless flags[:requireAll] + when '@type' + # No longer a wildcard pattern + wildcard = false - # No longer a wildcard pattern if frame has any non-keyword properties - wildcard = false - - # Skip, but allow match if node has no value for property, and frame has a default value - next if node_values.empty? && has_default - - # If frame value is empty, don't match if subject has any value - return false if !node_values.empty? && is_empty - - match_this = case - when v.nil? - # node does not match if values is not empty and the value of property in frame is match none. - return false unless node_values.empty? - true - when v.is_a?(Hash) && (v.keys - FRAMING_KEYWORDS).empty? - # node matches if values is not empty and the value of property in frame is wildcard (frame with properties other than framing keywords) - !node_values.empty? - when value?(v) - # Match on any matching value - node_values.any? {|nv| value_match?(v, nv)} - when node?(v) || node_reference?(v) - node_values.any? 
do |nv| - node_match?(v, nv, state, flags) + match_this = case v + when [] + # Don't match with any @type + return false unless node_values.empty? + + true + when [{}] + # Match with any @type + !node_values.empty? + else + # Treat a map with @default like an empty map + if v.first.is_a?(Hash) && v.first.keys == %w[@default] + true + else + !(v & node_values).empty? + end end - when list?(v) - vv = v['@list'].first - node_values = list?(node_values.first) ? - node_values.first['@list'] : - false - if !node_values - false # Lists match Lists - elsif value?(vv) + return match_this unless flags[:requireAll] + when /@/ + # Skip other keywords + next + else + is_empty = v.empty? + if (v = v.first) + validate_frame(v) + has_default = v.key?('@default') + end + + # No longer a wildcard pattern if frame has any non-keyword properties + wildcard = false + + # Skip, but allow match if node has no value for property, and frame has a default value + next if node_values.empty? && has_default + + # If frame value is empty, don't match if subject has any value + return false if !node_values.empty? && is_empty + + match_this = case + when v.nil? + # node does not match if values is not empty and the value of property in frame is match none. + return false unless node_values.empty? + + true + when v.is_a?(Hash) && (v.keys - FRAMING_KEYWORDS).empty? + # node matches if values is not empty and the value of property in frame is wildcard (frame with properties other than framing keywords) + !node_values.empty? + when value?(v) # Match on any matching value - node_values.any? {|nv| value_match?(vv, nv)} - elsif node?(vv) || node_reference?(vv) + node_values.any? { |nv| value_match?(v, nv) } + when node?(v) || node_reference?(v) node_values.any? 
do |nv| - node_match?(vv, nv, state, flags) + node_match?(v, nv, state, flags) + end + when list?(v) + vv = v['@list'].first + node_values = if list?(node_values.first) + node_values.first['@list'] + else + false + end + if !node_values + false # Lists match Lists + elsif value?(vv) + # Match on any matching value + node_values.any? { |nv| value_match?(vv, nv) } + elsif node?(vv) || node_reference?(vv) + node_values.any? do |nv| + node_match?(vv, nv, state, flags) + end + else + false end else - false + false # No matching on non-value or node values end - else - false # No matching on non-value or node values end - end - # All non-defaulted values must match if @requireAll is set - return false if !match_this && flags[:requireAll] + # All non-defaulted values must match if @requireAll is set + return false if !match_this && flags[:requireAll] + + matches_some ||= match_this + end - matches_some ||= match_this + # return true if wildcard or subject matches some properties + wildcard || matches_some end - # return true if wildcard or subject matches some properties - wildcard || matches_some - end + def validate_frame(frame) + unless frame.is_a?(Hash) || (frame.is_a?(Array) && frame.first.is_a?(Hash) && frame.length == 1) + raise JsonLdError::InvalidFrame, + "Invalid JSON-LD frame syntax; a JSON-LD frame must be an object: #{frame.inspect}" + end + frame = frame.first if frame.is_a?(Array) - def validate_frame(frame) - raise JsonLdError::InvalidFrame, - "Invalid JSON-LD frame syntax; a JSON-LD frame must be an object: #{frame.inspect}" unless - frame.is_a?(Hash) || (frame.is_a?(Array) && frame.first.is_a?(Hash) && frame.length == 1) - frame = frame.first if frame.is_a?(Array) - - # Check values of @id and @type - raise JsonLdError::InvalidFrame, - "Invalid JSON-LD frame syntax; invalid value of @id: #{frame['@id']}" unless - Array(frame['@id']) == [{}] || Array(frame['@id']).all?{|v| RDF::URI(v).valid?} - raise JsonLdError::InvalidFrame, - "Invalid JSON-LD frame 
syntax; invalid value of @type: #{frame['@type']}" unless - Array(frame['@type']).all?{|v| v.is_a?(Hash) && (v.keys - %w(@default)).empty? || RDF::URI(v).valid?} - end + # Check values of @id and @type + unless Array(frame['@id']) == [{}] || Array(frame['@id']).all? { |v| RDF::URI(v).valid? } + raise JsonLdError::InvalidFrame, + "Invalid JSON-LD frame syntax; invalid value of @id: #{frame['@id']}" + end + unless Array(frame['@type']).all? do |v| + (v.is_a?(Hash) && (v.keys - %w[@default]).empty?) || RDF::URI(v).valid? + end + raise JsonLdError::InvalidFrame, + "Invalid JSON-LD frame syntax; invalid value of @type: #{frame['@type']}" + end + end - # Checks the current subject stack to see if embedding the given subject would cause a circular reference. - # - # @param subject_to_embed the subject to embed. - # @param graph the graph the subject to embed is in. - # @param subject_stack the current stack of subjects. - # - # @return true if a circular reference would be created, false if not. - def creates_circular_reference(subject_to_embed, graph, subject_stack) - subject_stack[0..-2].any? do |subject| - subject[:graph] == graph && subject[:subject]['@id'] == subject_to_embed['@id'] + # Checks the current subject stack to see if embedding the given subject would cause a circular reference. + # + # @param subject_to_embed the subject to embed. + # @param graph the graph the subject to embed is in. + # @param subject_stack the current stack of subjects. + # + # @return true if a circular reference would be created, false if not. + def creates_circular_reference(subject_to_embed, graph, subject_stack) + subject_stack[0..-2].any? do |subject| + subject[:graph] == graph && subject[:subject]['@id'] == subject_to_embed['@id'] + end end - end - ## - # Gets the frame flag value for the given flag name. - # - # @param frame the frame. - # @param options the framing options. - # @param name the flag name. - # - # @return the flag value. 
- def get_frame_flag(frame, options, name) - rval = frame.fetch("@#{name}", [options[name]]).first - rval = rval.values.first if value?(rval) - if name == :embed - rval = case rval - when true then '@once' - when false then '@never' - when '@always', '@first', '@last', '@link', '@once', '@never' then rval - else - raise JsonLdError::InvalidEmbedValue, - "Invalid JSON-LD frame syntax; invalid value of @embed: #{rval}" + ## + # Gets the frame flag value for the given flag name. + # + # @param frame the frame. + # @param options the framing options. + # @param name the flag name. + # + # @return the flag value. + def get_frame_flag(frame, options, name) + rval = frame.fetch("@#{name}", [options[name]]).first + rval = rval.values.first if value?(rval) + if name == :embed + rval = case rval + when true then '@once' + when false then '@never' + when '@always', '@first', '@last', '@link', '@once', '@never' then rval + else + raise JsonLdError::InvalidEmbedValue, + "Invalid JSON-LD frame syntax; invalid value of @embed: #{rval}" + end end + rval end - rval - end - ## - # Removes an existing embed. - # - # @param state the current framing state. - # @param id the @id of the embed to remove. - def remove_embed(state, id) - # get existing embed - embeds = state[:uniqueEmbeds][state[:graph]]; - embed = embeds[id]; - property = embed[:property]; - - # create reference to replace embed - subject = {'@id' => id} - - if embed[:parent].is_a?(Array) - # replace subject with reference - embed[:parent].map! do |parent| - compare_values(parent, subject) ? subject : parent + ## + # Removes an existing embed. + # + # @param state the current framing state. + # @param id the @id of the embed to remove. 
+ def remove_embed(state, id) + # get existing embed + embeds = state[:uniqueEmbeds][state[:graph]] + embed = embeds[id] + property = embed[:property] + + # create reference to replace embed + subject = { '@id' => id } + + if embed[:parent].is_a?(Array) + # replace subject with reference + embed[:parent].map! do |parent| + compare_values(parent, subject) ? subject : parent + end + else + parent = embed[:parent] + # replace node with reference + if parent[property].is_a?(Array) + parent[property].reject! { |v| compare_values(v, subject) } + parent[property] << subject + elsif compare_values(parent[property], subject) + parent[property] = subject + end end - else - parent = embed[:parent] - # replace node with reference - if parent[property].is_a?(Array) - parent[property].reject! {|v| compare_values(v, subject)} - parent[property] << subject - elsif compare_values(parent[property], subject) - parent[property] = subject + + # recursively remove dependent dangling embeds + def remove_dependents(id, embeds) + # get embed keys as a separate array to enable deleting keys in map + embeds.each do |id_dep, e| + p = e.fetch(:parent, {}) if e.is_a?(Hash) + next unless p.is_a?(Hash) + + pid = p.fetch('@id', nil) + if pid == id + embeds.delete(id_dep) + remove_dependents(id_dep, embeds) + end + end + end + + remove_dependents(id, embeds) + end + + ## + # Adds framing output to the given parent. + # + # @param parent the parent to add to. + # @param property the parent property, null for an array parent. + # @param output the output to add. 
+ def add_frame_output(parent, property, output) + if parent.is_a?(Hash) + parent[property] ||= [] + parent[property] << output + else + parent << output end end - # recursively remove dependent dangling embeds - def remove_dependents(id, embeds) - # get embed keys as a separate array to enable deleting keys in map - embeds.each do |id_dep, e| - p = e.fetch(:parent, {}) if e.is_a?(Hash) - next unless p.is_a?(Hash) - pid = p.fetch('@id', nil) - if pid == id - embeds.delete(id_dep) - remove_dependents(id_dep, embeds) + # Creates an implicit frame when recursing through subject matches. If a frame doesn't have an explicit frame for a particular property, then a wildcard child frame will be created that uses the same flags that the parent frame used. + # + # @param [Hash] flags the current framing flags. + # @return [Array] the implicit frame. + def create_implicit_frame(flags) + {}.tap do |memo| + flags.each_pair do |key, val| + memo["@#{key}"] = [val] end end end - remove_dependents(id, embeds) - end + # Node matches if it is a node, and matches the pattern as a frame + def node_match?(pattern, value, state, flags) + return false unless value['@id'] - ## - # Adds framing output to the given parent. - # - # @param parent the parent to add to. - # @param property the parent property, null for an array parent. - # @param output the output to add. - def add_frame_output(parent, property, output) - if parent.is_a?(Hash) - parent[property] ||= [] - parent[property] << output - else - parent << output + node_object = state[:subjects][value['@id']] + node_object && filter_subject(node_object, pattern, state, flags) end - end - # Creates an implicit frame when recursing through subject matches. If a frame doesn't have an explicit frame for a particular property, then a wildcard child frame will be created that uses the same flags that the parent frame used. - # - # @param [Hash] flags the current framing flags. - # @return [Array] the implicit frame. 
- def create_implicit_frame(flags) - {}.tap do |memo| - flags.each_pair do |key, val| - memo["@#{key}"] = [val] + # Value matches if it is a value, and matches the value pattern. + # + # * `pattern` is empty + # * @values are the same, or `pattern[@value]` is a wildcard, and + # * @types are the same or `value[@type]` is not null and `pattern[@type]` is `{}`, or `value[@type]` is null and `pattern[@type]` is null or `[]`, and + # * @languages are the same or `value[@language]` is not null and `pattern[@language]` is `{}`, or `value[@language]` is null and `pattern[@language]` is null or `[]`. + def value_match?(pattern, value) + v1 = value['@value'] + t1 = value['@type'] + l1 = value['@language'] + v2 = Array(pattern['@value']) + t2 = Array(pattern['@type']) + l2 = Array(pattern['@language']).map do |v| + v.is_a?(String) ? v.downcase : v end - end - end + return true if (v2 + t2 + l2).empty? + return false unless v2.include?(v1) || v2 == [{}] + return false unless t2.include?(t1) || (t1 && t2 == [{}]) || (t1.nil? && (t2 || []).empty?) + return false unless l2.include?(l1.to_s.downcase) || (l1 && l2 == [{}]) || (l1.nil? && (l2 || []).empty?) - private - # Node matches if it is a node, and matches the pattern as a frame - def node_match?(pattern, value, state, flags) - return false unless value['@id'] - node_object = state[:subjects][value['@id']] - node_object && filter_subject(node_object, pattern, state, flags) - end + true + end - # Value matches if it is a value, and matches the value pattern. - # - # * `pattern` is empty - # * @values are the same, or `pattern[@value]` is a wildcard, and - # * @types are the same or `value[@type]` is not null and `pattern[@type]` is `{}`, or `value[@type]` is null and `pattern[@type]` is null or `[]`, and - # * @languages are the same or `value[@language]` is not null and `pattern[@language]` is `{}`, or `value[@language]` is null and `pattern[@language]` is null or `[]`. 
- def value_match?(pattern, value) - v1, t1, l1 = value['@value'], value['@type'], value['@language'] - v2, t2, l2 = Array(pattern['@value']), Array(pattern['@type']), Array(pattern['@language']).map {|v| v.is_a?(String) ? v.downcase : v} - return true if (v2 + t2 + l2).empty? - return false unless v2.include?(v1) || v2 == [{}] - return false unless t2.include?(t1) || t1 && t2 == [{}] || t1.nil? && (t2 || []).empty? - return false unless l2.include?(l1.to_s.downcase) || l1 && l2 == [{}] || l1.nil? && (l2 || []).empty? - true + FRAMING_KEYWORDS = %w[@default @embed @explicit @omitDefault @requireAll].freeze end - - FRAMING_KEYWORDS = %w(@default @embed @explicit @omitDefault @requireAll).freeze end end diff --git a/lib/json/ld/from_rdf.rb b/lib/json/ld/from_rdf.rb index a40c66cc..0c945c36 100644 --- a/lib/json/ld/from_rdf.rb +++ b/lib/json/ld/from_rdf.rb @@ -1,234 +1,253 @@ -# -*- encoding: utf-8 -*- # frozen_string_literal: true + require 'rdf/nquads' -module JSON::LD - module FromRDF - include Utils - - ## - # Generate a JSON-LD array representation from an array of `RDF::Statement`. - # Representation is in expanded form - # - # @param [Array, RDF::Enumerable] dataset - # @param [Boolean] useRdfType (false) - # If set to `true`, the JSON-LD processor will treat `rdf:type` like a normal property instead of using `@type`. - # @param [Boolean] useNativeTypes (false) use native representations - # @param extendedRepresentation (false) - # Use the extended internal representation for native types. 
- # - # @return [Array] the JSON-LD document in normalized form - def from_statements(dataset, useRdfType: false, useNativeTypes: false, extendedRepresentation: false) - default_graph = {} - graph_map = {'@default' => default_graph} - referenced_once = {} - - value = nil - - # Create an entry for compound-literal node detection - compound_literal_subjects = {} - - # Create a map for node to object representation - - # For each statement in dataset - dataset.each do |statement| - # log_debug("statement") { statement.to_nquads.chomp} - - name = statement.graph_name ? @context.expand_iri(statement.graph_name, base: @options[:base]).to_s : '@default' - - # Create a graph entry as needed - node_map = graph_map[name] ||= {} - compound_literal_subjects[name] ||= {} - - default_graph[name] ||= {'@id' => name} unless name == '@default' - - subject = statement.subject.statement? ? - resource_representation(statement.subject, useNativeTypes, extendedRepresentation)['@id'].to_json_c14n : - statement.subject.to_s - node = node_map[subject] ||= resource_representation(statement.subject, useNativeTypes, extendedRepresentation) - - # If predicate is rdf:datatype, note subject in compound literal subjects map - if @options[:rdfDirection] == 'compound-literal' && statement.predicate == RDF.to_uri + 'direction' - compound_literal_subjects[name][subject] ||= true - end +module JSON + module LD + module FromRDF + include Utils + + ## + # Generate a JSON-LD array representation from an array of `RDF::Statement`. + # Representation is in expanded form + # + # @param [Array, RDF::Enumerable] dataset + # @param [Boolean] useRdfType (false) + # If set to `true`, the JSON-LD processor will treat `rdf:type` like a normal property instead of using `@type`. + # @param [Boolean] useNativeTypes (false) use native representations + # @param extendedRepresentation (false) + # Use the extended internal representation for native types. 
+ # + # @return [Array] the JSON-LD document in normalized form + def from_statements(dataset, useRdfType: false, useNativeTypes: false, extendedRepresentation: false) + default_graph = {} + graph_map = { '@default' => default_graph } + referenced_once = {} + + value = nil + + # Create an entry for compound-literal node detection + compound_literal_subjects = {} + + # Create a map for node to object representation + + # For each statement in dataset + dataset.each do |statement| + # log_debug("statement") { statement.to_nquads.chomp} + + name = if statement.graph_name + @context.expand_iri(statement.graph_name, + base: @options[:base]).to_s + else + '@default' + end - # If object is an IRI, blank node identifier, or statement, and node map does not have an object member, create one and initialize its value to a new JSON object consisting of a single member @id whose value is set to object. - unless statement.object.literal? - object = statement.object.statement? ? - resource_representation(statement.object, useNativeTypes, extendedRepresentation)['@id'].to_json_c14n : - statement.object.to_s - node_map[object] ||= - resource_representation(statement.object, useNativeTypes, extendedRepresentation) - end + # Create a graph entry as needed + node_map = graph_map[name] ||= {} + compound_literal_subjects[name] ||= {} - # If predicate equals rdf:type, and object is an IRI or blank node identifier, append object to the value of the @type member of node. If no such member exists, create one and initialize it to an array whose only item is object. Finally, continue to the next RDF triple. - if statement.predicate == RDF.type && statement.object.resource? && !useRdfType - merge_value(node, '@type', statement.object.to_s) - next - end + default_graph[name] ||= { '@id' => name } unless name == '@default' + + subject = if statement.subject.statement? 
+ resource_representation(statement.subject, useNativeTypes, extendedRepresentation)['@id'].to_json_c14n + else + statement.subject.to_s + end + node = node_map[subject] ||= resource_representation(statement.subject, useNativeTypes, + extendedRepresentation) + + # If predicate is rdf:datatype, note subject in compound literal subjects map + if @options[:rdfDirection] == 'compound-literal' && statement.predicate == RDF_DIRECTION + compound_literal_subjects[name][subject] ||= true + end + + # If object is an IRI, blank node identifier, or statement, and node map does not have an object member, create one and initialize its value to a new JSON object consisting of a single member @id whose value is set to object. + unless statement.object.literal? + object = if statement.object.statement? + resource_representation(statement.object, useNativeTypes, extendedRepresentation)['@id'].to_json_c14n + else + statement.object.to_s + end + node_map[object] ||= + resource_representation(statement.object, useNativeTypes, extendedRepresentation) + end - # Set value to the result of using the RDF to Object Conversion algorithm, passing object, rdfDirection, and use native types. - value = resource_representation(statement.object, useNativeTypes, extendedRepresentation) - - merge_value(node, statement.predicate.to_s, value) - - # If object is a blank node identifier or rdf:nil, it might represent the a list node: - if statement.object == RDF.nil - # Append a new JSON object consisting of three members, node, property, and value to the usages array. The node member is set to a reference to node, property to predicate, and value to a reference to value. - object = node_map[statement.object.to_s] - merge_value(object, :usages, { - node: node, - property: statement.predicate.to_s, - value: value - }) - elsif referenced_once.key?(statement.object.to_s) - referenced_once[statement.object.to_s] = false - elsif statement.object.node? 
- referenced_once[statement.object.to_s] = { - node: node, - property: statement.predicate.to_s, - value: value - } + # If predicate equals rdf:type, and object is an IRI or blank node identifier, append object to the value of the @type member of node. If no such member exists, create one and initialize it to an array whose only item is object. Finally, continue to the next RDF triple. + if statement.predicate == RDF.type && statement.object.resource? && !useRdfType + merge_value(node, '@type', statement.object.to_s) + next + end + + # Set value to the result of using the RDF to Object Conversion algorithm, passing object, rdfDirection, and use native types. + value = resource_representation(statement.object, useNativeTypes, extendedRepresentation) + + merge_value(node, statement.predicate.to_s, value) + + # If object is a blank node identifier or rdf:nil, it might represent the a list node: + if statement.object == RDF.nil + # Append a new JSON object consisting of three members, node, property, and value to the usages array. The node member is set to a reference to node, property to predicate, and value to a reference to value. + object = node_map[statement.object.to_s] + merge_value(object, :usages, { + node: node, + property: statement.predicate.to_s, + value: value + }) + elsif referenced_once.key?(statement.object.to_s) + referenced_once[statement.object.to_s] = false + elsif statement.object.node? 
+ referenced_once[statement.object.to_s] = { + node: node, + property: statement.predicate.to_s, + value: value + } + end end - end - # For each name and graph object in graph map: - graph_map.each do |name, graph_object| - - # If rdfDirection is compound-literal, check referenced_once for entries from compound_literal_subjects - compound_literal_subjects.fetch(name, {}).keys.each do |cl| - node = referenced_once[cl][:node] - next unless node.is_a?(Hash) - property = referenced_once[cl][:property] - value = referenced_once[cl][:value] - cl_node = graph_map[name].delete(cl) - next unless cl_node.is_a?(Hash) - node[property].select do |v| - next unless v['@id'] == cl - v.delete('@id') - v['@value'] = cl_node[RDF.value.to_s].first['@value'] - if cl_node[RDF.to_uri.to_s + 'language'] - lang = cl_node[RDF.to_uri.to_s + 'language'].first['@value'] - if lang !~ /^[a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})*$/ - warn "i18n datatype language must be valid BCP47: #{lang.inspect}" + # For each name and graph object in graph map: + graph_map.each do |name, graph_object| + # If rdfDirection is compound-literal, check referenced_once for entries from compound_literal_subjects + compound_literal_subjects.fetch(name, {}).each_key do |cl| + node = referenced_once[cl][:node] + next unless node.is_a?(Hash) + + property = referenced_once[cl][:property] + value = referenced_once[cl][:value] + cl_node = graph_map[name].delete(cl) + next unless cl_node.is_a?(Hash) + + node[property].select do |v| + next unless v['@id'] == cl + + v.delete('@id') + v['@value'] = cl_node[RDF.value.to_s].first['@value'] + if (langs = cl_node[RDF_LANGUAGE.to_s]) + lang = langs.first['@value'] + unless /^[a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})*$/.match?(lang) + warn "i18n datatype language must be valid BCP47: #{lang.inspect}" + end + v['@language'] = lang end - v['@language'] = lang + v['@direction'] = cl_node[RDF_DIRECTION.to_s].first['@value'] end - v['@direction'] = cl_node[RDF.to_uri.to_s + 'direction'].first['@value'] end 
- end - nil_var = graph_object.fetch(RDF.nil.to_s, {}) - - # For each item usage in the usages member of nil, perform the following steps: - nil_var.fetch(:usages, []).each do |usage| - node, property, head = usage[:node], usage[:property], usage[:value] - list, list_nodes = [], [] - - # If property equals rdf:rest, the value associated to the usages member of node has exactly 1 entry, node has a rdf:first and rdf:rest property, both of which have as value an array consisting of a single element, and node has no other members apart from an optional @type member whose value is an array with a single item equal to rdf:List, node represents a well-formed list node. Continue with the following steps: - # log_debug("list element?") {node.to_json(JSON_STATE) rescue 'malformed json'} - while property == RDF.rest.to_s && - blank_node?(node) && - referenced_once[node['@id']] && - node.keys.none? {|k| !["@id", '@type', :usages, RDF.first.to_s, RDF.rest.to_s].include?(k)} && - (f = node[RDF.first.to_s]).is_a?(Array) && f.length == 1 && - (r = node[RDF.rest.to_s]).is_a?(Array) && r.length == 1 && - ((t = node['@type']).nil? 
|| t == [RDF.List.to_s]) - list << Array(node[RDF.first.to_s]).first - list_nodes << node['@id'] - - # get next node, moving backwards through list - node_usage = referenced_once[node['@id']] - node, property, head = node_usage[:node], node_usage[:property], node_usage[:value] + nil_var = graph_object.fetch(RDF.nil.to_s, {}) + + # For each item usage in the usages member of nil, perform the following steps: + nil_var.fetch(:usages, []).each do |usage| + node = usage[:node] + property = usage[:property] + head = usage[:value] + list = [] + list_nodes = [] + + # If property equals rdf:rest, the value associated to the usages member of node has exactly 1 entry, node has a rdf:first and rdf:rest property, both of which have as value an array consisting of a single element, and node has no other members apart from an optional @type member whose value is an array with a single item equal to rdf:List, node represents a well-formed list node. Continue with the following steps: + # log_debug("list element?") {node.to_json(JSON_STATE) rescue 'malformed json'} + while property == RDF.rest.to_s && + blank_node?(node) && + referenced_once[node['@id']] && + node.keys.none? { |k| !["@id", '@type', :usages, RDF.first.to_s, RDF.rest.to_s].include?(k) } && + (f = node[RDF.first.to_s]).is_a?(Array) && f.length == 1 && + (r = node[RDF.rest.to_s]).is_a?(Array) && r.length == 1 && + ((t = node['@type']).nil? 
|| t == [RDF.List.to_s]) + list << Array(node[RDF.first.to_s]).first + list_nodes << node['@id'] + + # get next node, moving backwards through list + node_usage = referenced_once[node['@id']] + node = node_usage[:node] + property = node_usage[:property] + head = node_usage[:value] + end + + head.delete('@id') + head['@list'] = list.reverse + list_nodes.each { |node_id| graph_object.delete(node_id) } end - head.delete('@id') - head['@list'] = list.reverse - list_nodes.each {|node_id| graph_object.delete(node_id)} + # Create annotations on graph object + create_annotations(graph_object) end - # Create annotations on graph object - create_annotations(graph_object) - end - - result = [] - default_graph.keys.opt_sort(ordered: @options[:ordered]).each do |subject| - node = default_graph[subject] - if graph_map.key?(subject) - node['@graph'] = [] - graph_map[subject].keys.opt_sort(ordered: @options[:ordered]).each do |s| - n = graph_map[subject][s] - n.delete(:usages) - node['@graph'] << n unless node_reference?(n) + result = [] + default_graph.keys.opt_sort(ordered: @options[:ordered]).each do |subject| + node = default_graph[subject] + if graph_map.key?(subject) + node['@graph'] = [] + graph_map[subject].keys.opt_sort(ordered: @options[:ordered]).each do |s| + n = graph_map[subject][s] + n.delete(:usages) + node['@graph'] << n unless node_reference?(n) + end end + node.delete(:usages) + result << node unless node_reference?(node) end - node.delete(:usages) - result << node unless node_reference?(node) + # log_debug("fromRdf") {result.to_json(JSON_STATE) rescue 'malformed json'} + result end - # log_debug("fromRdf") {result.to_json(JSON_STATE) rescue 'malformed json'} - result - end - - private - RDF_LITERAL_NATIVE_TYPES = Set.new([RDF::XSD.boolean, RDF::XSD.integer, RDF::XSD.double]).freeze - - def resource_representation(resource, useNativeTypes, extendedRepresentation) - case resource - when RDF::Statement - # Note, if either subject or object are a BNode which is 
used elsewhere, - # this might not work will with the BNode accounting from above. - rep = {'@id' => resource_representation(resource.subject, false, extendedRepresentation)} - if resource.predicate == RDF.type - rep['@id'].merge!('@type' => resource.object.to_s) - else - rep['@id'].merge!( - resource.predicate.to_s => - as_array(resource_representation(resource.object, useNativeTypes, extendedRepresentation))) - end - rep - when RDF::Literal - base = @options[:base] - rdfDirection = @options[:rdfDirection] - res = {} - - if resource.datatype == RDF::URI(RDF.to_uri + "JSON") && @context.processingMode('json-ld-1.1') - res['@type'] = '@json' - res['@value'] = begin - ::JSON.parse(resource.object) - rescue ::JSON::ParserError => e - raise JSON::LD::JsonLdError::InvalidJsonLiteral, e.message + private + + RDF_LITERAL_NATIVE_TYPES = Set.new([RDF::XSD.boolean, RDF::XSD.integer, RDF::XSD.double]).freeze + + def resource_representation(resource, useNativeTypes, extendedRepresentation) + case resource + when RDF::Statement + # Note, if either subject or object are a BNode which is used elsewhere, + # this might not work will with the BNode accounting from above. + rep = { '@id' => resource_representation(resource.subject, false, extendedRepresentation) } + if resource.predicate == RDF.type + rep['@id']['@type'] = resource.object.to_s + else + rep['@id'][resource.predicate.to_s] = + as_array(resource_representation(resource.object, useNativeTypes, extendedRepresentation)) end - elsif useNativeTypes && extendedRepresentation - res['@value'] = resource # Raw literal - elsif resource.datatype.start_with?("https://www.w3.org/ns/i18n#") && rdfDirection == 'i18n-datatype' && @context.processingMode('json-ld-1.1') - lang, dir = resource.datatype.fragment.split('_') - res['@value'] = resource.to_s - unless lang.empty? 
- if lang !~ /^[a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})*$/ - if options[:validate] - raise JsonLdError::InvalidLanguageMapping, "rdf:language must be valid BCP47: #{lang.inspect}" - else + rep + when RDF::Literal + base = @options[:base] + rdfDirection = @options[:rdfDirection] + res = {} + + if resource.datatype == RDF_JSON && @context.processingMode('json-ld-1.1') + res['@type'] = '@json' + res['@value'] = begin + ::JSON.parse(resource.object) + rescue ::JSON::ParserError => e + raise JSON::LD::JsonLdError::InvalidJsonLiteral, e.message + end + elsif useNativeTypes && extendedRepresentation + res['@value'] = resource # Raw literal + elsif resource.datatype.start_with?("https://www.w3.org/ns/i18n#") && rdfDirection == 'i18n-datatype' && @context.processingMode('json-ld-1.1') + lang, dir = resource.datatype.fragment.split('_') + res['@value'] = resource.to_s + unless lang.empty? + unless /^[a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})*$/.match?(lang) + if options[:validate] + raise JsonLdError::InvalidLanguageMapping, "rdf:language must be valid BCP47: #{lang.inspect}" + end + warn "rdf:language must be valid BCP47: #{lang.inspect}" + end + res['@language'] = lang end - res['@language'] = lang + res['@direction'] = dir + elsif useNativeTypes && RDF_LITERAL_NATIVE_TYPES.include?(resource.datatype) && resource.valid? + res['@value'] = resource.object + else + resource.canonicalize! if resource.valid? && resource.datatype == RDF::XSD.double + if resource.datatype? + res['@type'] = resource.datatype.to_s + elsif resource.language? + res['@language'] = resource.language.to_s + end + res['@value'] = resource.to_s end - res['@direction'] = dir - elsif useNativeTypes && RDF_LITERAL_NATIVE_TYPES.include?(resource.datatype) && resource.valid? - res['@value'] = resource.object + res else - resource.canonicalize! if resource.valid? && resource.datatype == RDF::XSD.double - if resource.datatype? - res['@type'] = resource.datatype.to_s - elsif resource.language? 
- res['@language'] = resource.language.to_s - end - res['@value'] = resource.to_s + { '@id' => resource.to_s } end - res - else - {'@id' => resource.to_s} end end end diff --git a/lib/json/ld/html/nokogiri.rb b/lib/json/ld/html/nokogiri.rb index d709ae4a..d2799e8b 100644 --- a/lib/json/ld/html/nokogiri.rb +++ b/lib/json/ld/html/nokogiri.rb @@ -1,151 +1,153 @@ -module JSON::LD - class API - ## - # Nokogiri implementation of an HTML parser. - # - # @see http://nokogiri.org/ - module Nokogiri +# frozen_string_literal: true + +module JSON + module LD + class API ## - # Returns the name of the underlying XML library. + # Nokogiri implementation of an HTML parser. # - # @return [Symbol] - def self.library - :nokogiri - end - - # Proxy class to implement uniform element accessors - class NodeProxy - attr_reader :node - attr_reader :parent - - def initialize(node, parent = nil) - @node = node - @parent = parent - end - + # @see http://nokogiri.org/ + module Nokogiri ## - # Return xml:base on element, if defined + # Returns the name of the underlying XML library. 
# - # @return [String] - def base - @node.attribute_with_ns("base", RDF::XML.to_s) || @node.attribute('xml:base') + # @return [Symbol] + def self.library + :nokogiri end - def display_path - @display_path ||= begin - path = [] - path << parent.display_path if parent - path << @node.name - case @node - when ::Nokogiri::XML::Element then path.join("/") - when ::Nokogiri::XML::Attr then path.join("@") - else path.join("?") + # Proxy class to implement uniform element accessors + class NodeProxy + attr_reader :node, :parent + + def initialize(node, parent = nil) + @node = node + @parent = parent + end + + ## + # Return xml:base on element, if defined + # + # @return [String] + def base + @node.attribute_with_ns("base", RDF::XML.to_s) || @node.attribute('xml:base') + end + + def display_path + @display_path ||= begin + path = [] + path << parent.display_path if parent + path << @node.name + case @node + when ::Nokogiri::XML::Element then path.join("/") + when ::Nokogiri::XML::Attr then path.join("@") + else path.join("?") + end end end - end - ## - # Return true of all child elements are text - # - # @return [Array<:text, :element, :attribute>] - def text_content? - @node.children.all? {|c| c.text?} - end + ## + # Return true of all child elements are text + # + # @return [Array<:text, :element, :attribute>] + def text_content? + @node.children.all?(&:text?) + end - ## - # Children of this node - # - # @return [NodeSetProxy] - def children - NodeSetProxy.new(@node.children, self) - end + ## + # Children of this node + # + # @return [NodeSetProxy] + def children + NodeSetProxy.new(@node.children, self) + end - # Ancestors of this element, in order - def ancestors - @ancestors ||= parent ? parent.ancestors + [parent] : [] - end + # Ancestors of this element, in order + def ancestors + @ancestors ||= parent ? parent.ancestors + [parent] : [] + end - ## - # Inner text of an element. 
Decode Entities - # - # @return [String] - #def inner_text - # coder = HTMLEntities.new - # coder.decode(@node.inner_text) - #end - - def attribute_nodes - @attribute_nodes ||= NodeSetProxy.new(@node.attribute_nodes, self) - end + ## + # Inner text of an element. Decode Entities + # + # @return [String] + # def inner_text + # coder = HTMLEntities.new + # coder.decode(@node.inner_text) + # end + + def attribute_nodes + @attribute_nodes ||= NodeSetProxy.new(@node.attribute_nodes, self) + end - def xpath(*args) - @node.xpath(*args).map do |n| - # Get node ancestors - parent = n.ancestors.reverse.inject(nil) do |p,node| - NodeProxy.new(node, p) + def xpath(*args) + @node.xpath(*args).map do |n| + # Get node ancestors + parent = n.ancestors.reverse.inject(nil) do |p, node| + NodeProxy.new(node, p) + end + NodeProxy.new(n, parent) end - NodeProxy.new(n, parent) + end + + ## + # Proxy for everything else to @node + def method_missing(method, *args) + @node.send(method, *args) end end ## - # Proxy for everything else to @node - def method_missing(method, *args) - @node.send(method, *args) - end - end + # NodeSet proxy + class NodeSetProxy + attr_reader :node_set, :parent - ## - # NodeSet proxy - class NodeSetProxy - attr_reader :node_set - attr_reader :parent - - def initialize(node_set, parent) - @node_set = node_set - @parent = parent - end + def initialize(node_set, parent) + @node_set = node_set + @parent = parent + end - ## - # Return a proxy for each child - # - # @yield child - # @yieldparam [NodeProxy] - def each - @node_set.each do |c| - yield NodeProxy.new(c, parent) + ## + # Return a proxy for each child + # + # @yield child + # @yieldparam [NodeProxy] + def each + @node_set.each do |c| + yield NodeProxy.new(c, parent) + end end - end - ## - # Proxy for everything else to @node_set - def method_missing(method, *args) - @node_set.send(method, *args) + ## + # Proxy for everything else to @node_set + def method_missing(method, *args) + @node_set.send(method, *args) 
+ end end - end - ## - # Initializes the underlying XML library. - # - # @param [Hash{Symbol => Object}] options - # @return [NodeProxy] of root element - def initialize_html_nokogiri(input, options = {}) - require 'nokogiri' unless defined?(::Nokogiri) - doc = case input - when ::Nokogiri::HTML::Document, ::Nokogiri::XML::Document - input - else - begin - input = input.read if input.respond_to?(:read) - ::Nokogiri::HTML5(input.force_encoding('utf-8'), max_parse_errors: 1000) - rescue LoadError, NoMethodError - ::Nokogiri::HTML.parse(input, base_uri.to_s, 'utf-8') + ## + # Initializes the underlying XML library. + # + # @param [Hash{Symbol => Object}] options + # @return [NodeProxy] of root element + def initialize_html_nokogiri(input, _options = {}) + require 'nokogiri' unless defined?(::Nokogiri) + doc = case input + when ::Nokogiri::HTML::Document, ::Nokogiri::XML::Document + input + else + begin + input = input.read if input.respond_to?(:read) + ::Nokogiri::HTML5(input.force_encoding('utf-8'), max_parse_errors: 1000) + rescue LoadError, NoMethodError + ::Nokogiri::HTML.parse(input, base_uri.to_s, 'utf-8') + end end - end - NodeProxy.new(doc.root) if doc && doc.root + NodeProxy.new(doc.root) if doc&.root + end + alias initialize_html initialize_html_nokogiri end - alias_method :initialize_html, :initialize_html_nokogiri end end end diff --git a/lib/json/ld/html/rexml.rb b/lib/json/ld/html/rexml.rb index d1b41b24..08ff6680 100644 --- a/lib/json/ld/html/rexml.rb +++ b/lib/json/ld/html/rexml.rb @@ -1,187 +1,191 @@ +# frozen_string_literal: true + require 'htmlentities' -module JSON::LD - class API - ## - # REXML implementation of an XML parser. - # - # @see http://www.germane-software.com/software/rexml/ - module REXML +module JSON + module LD + class API ## - # Returns the name of the underlying XML library. + # REXML implementation of an XML parser. 
# - # @return [Symbol] - def self.library - :rexml - end - - # Proxy class to implement uniform element accessors - class NodeProxy - attr_reader :node - attr_reader :parent - - def initialize(node, parent = nil) - @node = node - @parent = parent - end - + # @see http://www.germane-software.com/software/rexml/ + module REXML ## - # Return xml:base on element, if defined + # Returns the name of the underlying XML library. # - # @return [String] - def base - @node.attribute("base", RDF::XML.to_s) || @node.attribute('xml:base') + # @return [Symbol] + def self.library + :rexml end - def display_path - @display_path ||= begin - path = [] - path << parent.display_path if parent - path << @node.name - case @node - when ::REXML::Element then path.join("/") - when ::REXML::Attribute then path.join("@") - else path.join("?") + # Proxy class to implement uniform element accessors + class NodeProxy + attr_reader :node, :parent + + def initialize(node, parent = nil) + @node = node + @parent = parent + end + + ## + # Return xml:base on element, if defined + # + # @return [String] + def base + @node.attribute("base", RDF::XML.to_s) || @node.attribute('xml:base') + end + + def display_path + @display_path ||= begin + path = [] + path << parent.display_path if parent + path << @node.name + case @node + when ::REXML::Element then path.join("/") + when ::REXML::Attribute then path.join("@") + else path.join("?") + end end end - end - ## - # Return true of all child elements are text - # - # @return [Array<:text, :element, :attribute>] - def text_content? - @node.children.all? {|c| c.is_a?(::REXML::Text)} - end + ## + # Return true of all child elements are text + # + # @return [Array<:text, :element, :attribute>] + def text_content? 
+ @node.children.all?(::REXML::Text) + end - ## - # Children of this node - # - # @return [NodeSetProxy] - def children - NodeSetProxy.new(@node.children, self) - end + ## + # Children of this node + # + # @return [NodeSetProxy] + def children + NodeSetProxy.new(@node.children, self) + end - # Ancestors of this element, in order - def ancestors - @ancestors ||= parent ? parent.ancestors + [parent] : [] - end + # Ancestors of this element, in order + def ancestors + @ancestors ||= parent ? parent.ancestors + [parent] : [] + end - ## - # Inner text of an element - # - # @see http://apidock.com/ruby/REXML/Element/get_text#743-Get-all-inner-texts - # @return [String] - def inner_text - coder = HTMLEntities.new - ::REXML::XPath.match(@node,'.//text()').map { |e| - coder.decode(e) - }.join - end + ## + # Inner text of an element + # + # @see http://apidock.com/ruby/REXML/Element/get_text#743-Get-all-inner-texts + # @return [String] + def inner_text + coder = HTMLEntities.new + ::REXML::XPath.match(@node, './/text()').map do |e| + coder.decode(e) + end.join + end - ## - # Inner text of an element - # - # @see http://apidock.com/ruby/REXML/Element/get_text#743-Get-all-inner-texts - # @return [String] - def inner_html - @node.children.map(&:to_s).join - end + ## + # Inner text of an element + # + # @see http://apidock.com/ruby/REXML/Element/get_text#743-Get-all-inner-texts + # @return [String] + def inner_html + @node.children.map(&:to_s).join + end - def attribute_nodes - attrs = @node.attributes.dup.keep_if do |name, attr| - !name.start_with?('xmlns') + def attribute_nodes + attrs = @node.attributes.dup.keep_if do |name, _attr| + !name.start_with?('xmlns') + end + @attribute_nodes ||= (attrs.empty? ? attrs : NodeSetProxy.new(attrs, self)) end - @attribute_nodes ||= (attrs.empty? ? attrs : NodeSetProxy.new(attrs, self)) - end - ## - # Node type accessors - # - # @return [Boolean] - def text? 
- @node.is_a?(::REXML::Text) - end + ## + # Node type accessors + # + # @return [Boolean] + def text? + @node.is_a?(::REXML::Text) + end - def element? - @node.is_a?(::REXML::Element) - end + def element? + @node.is_a?(::REXML::Element) + end - def blank? - @node.is_a?(::REXML::Text) && @node.empty? - end + def blank? + @node.is_a?(::REXML::Text) && @node.empty? + end - def to_s; @node.to_s; end + def to_s + @node.to_s + end - def xpath(*args) - ::REXML::XPath.match(@node, *args).map do |n| - NodeProxy.new(n, parent) + def xpath(*args) + ::REXML::XPath.match(@node, *args).map do |n| + NodeProxy.new(n, parent) + end + end + + def at_xpath(*args) + xpath(*args).first end - end - def at_xpath(*args) - xpath(*args).first + ## + # Proxy for everything else to @node + def method_missing(method, *args) + @node.send(method, *args) + end end ## - # Proxy for everything else to @node - def method_missing(method, *args) - @node.send(method, *args) - end - end + # NodeSet proxy + class NodeSetProxy + attr_reader :node_set, :parent - ## - # NodeSet proxy - class NodeSetProxy - attr_reader :node_set - attr_reader :parent - - def initialize(node_set, parent) - @node_set = node_set - @parent = parent - end + def initialize(node_set, parent) + @node_set = node_set + @parent = parent + end - ## - # Return a proxy for each child - # - # @yield child - # @yieldparam [NodeProxy] - def each - @node_set.each do |c| - yield NodeProxy.new(c, parent) + ## + # Return a proxy for each child + # + # @yield child + # @yieldparam [NodeProxy] + def each + @node_set.each do |c| + yield NodeProxy.new(c, parent) + end end - end - ## - def to_html - node_set.map(&:to_s).join("") + ## + def to_html + node_set.map(&:to_s).join + end + + ## + # Proxy for everything else to @node_set + def method_missing(method, *args) + @node_set.send(method, *args) + end end ## - # Proxy for everything else to @node_set - def method_missing(method, *args) - @node_set.send(method, *args) - end - end + # Initializes the 
underlying XML library. + # + # @param [Hash{Symbol => Object}] options + # @return [NodeProxy] of document root + def initialize_html_rexml(input, _options = {}) + require 'rexml/document' unless defined?(::REXML) + doc = case input + when ::REXML::Document + input + else + # Only parse as XML, no HTML mode + ::REXML::Document.new(input.respond_to?(:read) ? input.read : input.to_s) + end - ## - # Initializes the underlying XML library. - # - # @param [Hash{Symbol => Object}] options - # @return [NodeProxy] of document root - def initialize_html_rexml(input, options = {}) - require 'rexml/document' unless defined?(::REXML) - doc = case input - when ::REXML::Document - input - else - # Only parse as XML, no HTML mode - ::REXML::Document.new(input.respond_to?(:read) ? input.read : input.to_s) + NodeProxy.new(doc.root) if doc&.root end - - NodeProxy.new(doc.root) if doc && doc.root + alias initialize_html initialize_html_rexml end - alias_method :initialize_html, :initialize_html_rexml end end end diff --git a/lib/json/ld/reader.rb b/lib/json/ld/reader.rb index f1c89882..64ed74da 100644 --- a/lib/json/ld/reader.rb +++ b/lib/json/ld/reader.rb @@ -1,116 +1,123 @@ -# -*- encoding: utf-8 -*- - -module JSON::LD - ## - # A JSON-LD parser in Ruby. - # - # @see https://www.w3.org/TR/json-ld11-api - # @author [Gregg Kellogg](http://greggkellogg.net/) - class Reader < RDF::Reader - include StreamingReader - format Format +# frozen_string_literal: true +module JSON + module LD ## - # JSON-LD Reader options - # @see https://ruby-rdf.github.io/rdf/RDF/Reader#options-class_method - def self.options - super + [ - RDF::CLI::Option.new( - symbol: :expandContext, - control: :url2, - datatype: RDF::URI, - on: ["--expand-context CONTEXT"], - description: "Context to use when expanding.") {|arg| RDF::URI(arg).absolute? ? 
RDF::URI(arg) : StringIO.new(File.read(arg))}, - RDF::CLI::Option.new( - symbol: :extractAllScripts, - datatype: TrueClass, - default: false, - control: :checkbox, - on: ["--[no-]extract-all-scripts"], - description: "If set to true, when extracting JSON-LD script elements from HTML, unless a specific fragment identifier is targeted, extracts all encountered JSON-LD script elements using an array form, if necessary.") {|arg| RDF::URI(arg)}, - RDF::CLI::Option.new( - symbol: :lowercaseLanguage, - datatype: TrueClass, - control: :checkbox, - on: ["--[no-]lowercase-language"], - description: "By default, language tags are left as is. To normalize to lowercase, set this option to `true`."), - RDF::CLI::Option.new( - symbol: :processingMode, - datatype: %w(json-ld-1.0 json-ld-1.1), - control: :radio, - on: ["--processingMode MODE", %w(json-ld-1.0 json-ld-1.1)], - description: "Set Processing Mode (json-ld-1.0 or json-ld-1.1)"), - RDF::CLI::Option.new( - symbol: :rdfDirection, - datatype: %w(i18n-datatype compound-literal), - default: 'null', - control: :select, - on: ["--rdf-direction DIR", %w(i18n-datatype compound-literal)], - description: "How to serialize literal direction (i18n-datatype compound-literal)") {|arg| RDF::URI(arg)}, - RDF::CLI::Option.new( - symbol: :stream, - datatype: TrueClass, - control: :checkbox, - on: ["--[no-]stream"], - description: "Optimize for streaming JSON-LD to RDF.") {|arg| arg}, - ] - end - - ## - # Initializes the JSON-LD reader instance. + # A JSON-LD parser in Ruby. 
# - # @param [IO, File, String] input - # @param [Hash{Symbol => Object}] options - # any additional options (see `RDF::Reader#initialize` and {JSON::LD::API.initialize}) - # @yield [reader] `self` - # @yieldparam [RDF::Reader] reader - # @yieldreturn [void] ignored - # @raise [RDF::ReaderError] if the JSON document cannot be loaded - def initialize(input = $stdin, **options, &block) - options[:base_uri] ||= options[:base] - options[:rename_bnodes] = false unless options.key?(:rename_bnodes) - super do - @options[:base] ||= base_uri.to_s if base_uri - # Trim non-JSON stuff in script. - @doc = if input.respond_to?(:read) - input - else - StringIO.new(input.to_s.sub(%r(\A[^{\[]*)m, '').sub(%r([^}\]]*\Z)m, '')) - end + # @see https://www.w3.org/TR/json-ld11-api + # @author [Gregg Kellogg](http://greggkellogg.net/) + class Reader < RDF::Reader + include StreamingReader + format Format - if block_given? - case block.arity + ## + # JSON-LD Reader options + # @see https://ruby-rdf.github.io/rdf/RDF/Reader#options-class_method + def self.options + super + [ + RDF::CLI::Option.new( + symbol: :expandContext, + control: :url2, + datatype: RDF::URI, + on: ["--expand-context CONTEXT"], + description: "Context to use when expanding." + ) { |arg| RDF::URI(arg).absolute? ? RDF::URI(arg) : StringIO.new(File.read(arg)) }, + RDF::CLI::Option.new( + symbol: :extractAllScripts, + datatype: TrueClass, + default: false, + control: :checkbox, + on: ["--[no-]extract-all-scripts"], + description: "If set to true, when extracting JSON-LD script elements from HTML, unless a specific fragment identifier is targeted, extracts all encountered JSON-LD script elements using an array form, if necessary." + ) { |arg| RDF::URI(arg) }, + RDF::CLI::Option.new( + symbol: :lowercaseLanguage, + datatype: TrueClass, + control: :checkbox, + on: ["--[no-]lowercase-language"], + description: "By default, language tags are left as is. To normalize to lowercase, set this option to `true`." 
+ ), + RDF::CLI::Option.new( + symbol: :processingMode, + datatype: %w[json-ld-1.0 json-ld-1.1], + control: :radio, + on: ["--processingMode MODE", %w[json-ld-1.0 json-ld-1.1]], + description: "Set Processing Mode (json-ld-1.0 or json-ld-1.1)" + ), + RDF::CLI::Option.new( + symbol: :rdfDirection, + datatype: %w[i18n-datatype compound-literal], + default: 'null', + control: :select, + on: ["--rdf-direction DIR", %w[i18n-datatype compound-literal]], + description: "How to serialize literal direction (i18n-datatype compound-literal)" + ) { |arg| RDF::URI(arg) }, + RDF::CLI::Option.new( + symbol: :stream, + datatype: TrueClass, + control: :checkbox, + on: ["--[no-]stream"], + description: "Optimize for streaming JSON-LD to RDF." + ) { |arg| arg } + ] + end + + ## + # Initializes the JSON-LD reader instance. + # + # @param [IO, File, String] input + # @param [Hash{Symbol => Object}] options + # any additional options (see `RDF::Reader#initialize` and {JSON::LD::API.initialize}) + # @yield [reader] `self` + # @yieldparam [RDF::Reader] reader + # @yieldreturn [void] ignored + # @raise [RDF::ReaderError] if the JSON document cannot be loaded + def initialize(input = $stdin, **options, &block) + options[:base_uri] ||= options[:base] + options[:rename_bnodes] = false unless options.key?(:rename_bnodes) + super do + @options[:base] ||= base_uri.to_s if base_uri + # Trim non-JSON stuff in script. 
+ @doc = if input.respond_to?(:read) + input + else + StringIO.new(input.to_s.sub(/\A[^{\[]*/m, '').sub(/[^}\]]*\Z/m, '')) + end + + if block + case block.arity when 0 then instance_eval(&block) - else block.call(self) + else yield(self) + end end end end - end - ## - # @private - # @see RDF::Reader#each_statement - def each_statement(&block) - if @options[:stream] - stream_statement(&block) - else - API.toRdf(@doc, **@options, &block) + ## + # @private + # @see RDF::Reader#each_statement + def each_statement(&block) + if @options[:stream] + stream_statement(&block) + else + API.toRdf(@doc, **@options, &block) + end + rescue ::JSON::ParserError, ::JSON::LD::JsonLdError => e + log_fatal("Failed to parse input document: #{e.message}", exception: RDF::ReaderError) end - rescue ::JSON::ParserError, ::JSON::LD::JsonLdError => e - log_fatal("Failed to parse input document: #{e.message}", exception: RDF::ReaderError) - end - ## - # @private - # @see RDF::Reader#each_triple - def each_triple(&block) - if block_given? - each_statement do |statement| - yield(*statement.to_triple) + ## + # @private + # @see RDF::Reader#each_triple + def each_triple + if block_given? + each_statement do |statement| + yield(*statement.to_triple) + end end + enum_for(:each_triple) end - enum_for(:each_triple) end end end - diff --git a/lib/json/ld/resource.rb b/lib/json/ld/resource.rb index 17c94299..cca2f4cf 100644 --- a/lib/json/ld/resource.rb +++ b/lib/json/ld/resource.rb @@ -1,230 +1,249 @@ -# -*- encoding: utf-8 -*- # frozen_string_literal: true -module JSON::LD - # Simple Ruby reflector class to provide native - # access to JSON-LD objects - class Resource - include RDF::Enumerable - - # @return [Hash Object] Object representation of resource - attr_reader :attributes - - # @return [String] ID of this resource - attr_reader :id - - # @return [JSON::LD::Context] Context associated with this resource - attr_reader :context - - ## - # Is this resource clean (i.e., saved to mongo?) 
- # - # @return [Boolean] - def clean?; @clean; end - - ## - # Is this resource dirty (i.e., not yet saved to mongo?) - # - # @return [Boolean] - def dirty?; !clean?; end - - ## - # Has this resource been reconciled against a mongo ID? - # - # @return [Boolean] - def reconciled?; @reconciled; end - - ## - # Has this resource been resolved so that - # all references are to other Resources? - # - # @return [Boolean] - def resolved?; @resolved; end - - ## - # Anonymous resources have BNode ids or no schema:url - # - # @return [Boolean] - def anonymous?; @anon; end - - ## - # Is this a stub resource, which has not yet been - # synched or created within the DB? - def stub?; !!@stub; end - - ## - # Is this a new resource, which has not yet been - # synched or created within the DB? - def new?; !!@new; end - - ## - # A new resource from the parsed graph - # @param [Hash{String => Object}] node_definition - # @param [Hash{Symbol => Object}] options - # @option options [String] :context - # Resource context, used for finding - # appropriate collection and JSON-LD context. - # @option options [Boolean] :clean (false) - # @option options [Boolean] :compact (false) - # Assume `node_definition` is in expanded form - # and compact using `context`. - # @option options [Boolean] :reconciled (!new) - # node_definition is not based on Mongo IDs - # and must be reconciled against Mongo, or merged - # into another resource. 
- # @option options [Boolean] :new (true) - # This is a new resource, not yet saved to Mongo - # @option options [Boolean] :stub (false) - # This is a stand-in for another resource that has - # not yet been retrieved (or created) from Mongo - def initialize(node_definition, **options) - @context = options[:context] - @clean = options.fetch(:clean, false) - @new = options.fetch(:new, true) - @reconciled = options.fetch(:reconciled, !@new) - @resolved = false - @attributes = if options[:compact] - JSON::LD::API.compact(node_definition, @context) - else - node_definition - end - @id = @attributes['@id'] - @anon = @id.nil? || @id.to_s.start_with?('_:') - end - ## - # Return a hash of this object, suitable for use by for ETag - # @return [Integer] - def hash - self.deresolve.hash - end +module JSON + module LD + # Simple Ruby reflector class to provide native + # access to JSON-LD objects + class Resource + include RDF::Enumerable + + # @return [Hash Object] Object representation of resource + attr_reader :attributes + + # @return [String] ID of this resource + attr_reader :id + + # @return [JSON::LD::Context] Context associated with this resource + attr_reader :context + + ## + # Is this resource clean (i.e., saved to mongo?) + # + # @return [Boolean] + def clean? + @clean + end + + ## + # Is this resource dirty (i.e., not yet saved to mongo?) + # + # @return [Boolean] + def dirty? + !clean? + end + + ## + # Has this resource been reconciled against a mongo ID? + # + # @return [Boolean] + def reconciled? + @reconciled + end + + ## + # Has this resource been resolved so that + # all references are to other Resources? + # + # @return [Boolean] + def resolved? + @resolved + end + + ## + # Anonymous resources have BNode ids or no schema:url + # + # @return [Boolean] + def anonymous? + @anon + end + + ## + # Is this a stub resource, which has not yet been + # synched or created within the DB? + def stub? 
+ !!@stub + end + + ## + # Is this a new resource, which has not yet been + # synched or created within the DB? + def new? + !!@new + end + + ## + # A new resource from the parsed graph + # @param [Hash{String => Object}] node_definition + # @param [Hash{Symbol => Object}] options + # @option options [String] :context + # Resource context, used for finding + # appropriate collection and JSON-LD context. + # @option options [Boolean] :clean (false) + # @option options [Boolean] :compact (false) + # Assume `node_definition` is in expanded form + # and compact using `context`. + # @option options [Boolean] :reconciled (!new) + # node_definition is not based on Mongo IDs + # and must be reconciled against Mongo, or merged + # into another resource. + # @option options [Boolean] :new (true) + # This is a new resource, not yet saved to Mongo + # @option options [Boolean] :stub (false) + # This is a stand-in for another resource that has + # not yet been retrieved (or created) from Mongo + def initialize(node_definition, **options) + @context = options[:context] + @clean = options.fetch(:clean, false) + @new = options.fetch(:new, true) + @reconciled = options.fetch(:reconciled, !@new) + @resolved = false + @attributes = if options[:compact] + JSON::LD::API.compact(node_definition, @context) + else + node_definition + end + @id = @attributes['@id'] + @anon = @id.nil? || @id.to_s.start_with?('_:') + end + + ## + # Return a hash of this object, suitable for use by for ETag + # @return [Integer] + def hash + deresolve.hash + end - ## - # Reverse resolution of resource attributes. - # Just returns `attributes` if - # resource is unresolved. Otherwise, replaces `Resource` - # values with node references. - # - # Result is expanded and re-compacted to get to normalized - # representation. - # - # @return [Hash] deresolved attribute hash - def deresolve - node_definition = if resolved? 
- deresolved = [].tap do |memo| - attributes.each_pair do |prop, value| - memo[prop] = case value - when Resource - {'id' => value.id} - when Array - value.map do |v| - v.is_a?(Resource) ? {'id' => v.id} : v + ## + # Reverse resolution of resource attributes. + # Just returns `attributes` if + # resource is unresolved. Otherwise, replaces `Resource` + # values with node references. + # + # Result is expanded and re-compacted to get to normalized + # representation. + # + # @return [Hash] deresolved attribute hash + def deresolve + node_definition = if resolved? + deresolved = [].tap do |memo| + attributes.each_pair do |prop, value| + memo[prop] = case value + when Resource + { 'id' => value.id } + when Array + value.map do |v| + v.is_a?(Resource) ? { 'id' => v.id } : v + end + else + value end - else - value end end + deresolved + else + attributes end - deresolved - else - attributes - end - compacted = nil - JSON::LD::API.expand(node_definition, expandContext: @context) do |expanded| - compacted = JSON::LD::API.compact(expanded, @context) + compacted = nil + JSON::LD::API.expand(node_definition, expandContext: @context) do |expanded| + compacted = JSON::LD::API.compact(expanded, @context) + end + compacted.delete_if { |k, _v| k == '@context' } end - compacted.delete_if {|k, v| k == '@context'} - end - ## - # Serialize to JSON-LD, minus `@context` using - # a deresolved version of the attributes - # - # @param [Hash] options - # @return [String] serizlied JSON representation of resource - def to_json(**options) - deresolve.to_json(**options) - end + ## + # Serialize to JSON-LD, minus `@context` using + # a deresolved version of the attributes + # + # @param [Hash] options + # @return [String] serialized JSON representation of resource + def to_json(**options) + deresolve.to_json(**options) + end - ## - # Enumerate over statements associated with this resource - def each(&block) - JSON::LD::API.toRdf(attributes, expandContext: context, &block) - end + ## + # 
Enumerate over statements associated with this resource + def each(&block) + JSON::LD::API.toRdf(attributes, expandContext: context, &block) + end - ## - # Update node references using the provided map. - # This replaces node references with Resources, - # either stub or instantiated. - # - # Node references with ids not in the reference_map - # will cause stub resources to be added to the map. - # - # @param [Hash{String => Resource}] reference_map - # @return [Resource] self - def resolve(reference_map) - return if resolved? - def update_obj(obj, reference_map) - case obj - when Array - obj.map {|o| update_obj(o, reference_map)} - when Hash - if node_reference?(obj) - reference_map[obj['id']] ||= Resource.new(obj, - context: @context_name, - clean: false, - stub: true - ) - else - obj.each_key do |k| - obj[k] = update_obj(obj[k], reference_map) + ## + # Update node references using the provided map. + # This replaces node references with Resources, + # either stub or instantiated. + # + # Node references with ids not in the reference_map + # will cause stub resources to be added to the map. + # + # @param [Hash{String => Resource}] reference_map + # @return [Resource] self + def resolve(reference_map) + return if resolved? 
+ + def update_obj(obj, reference_map) + case obj + when Array + obj.map { |o| update_obj(o, reference_map) } + when Hash + if node_reference?(obj) + reference_map[obj['id']] ||= Resource.new(obj, + context: @context_name, + clean: false, + stub: true) + else + obj.each_key do |k| + obj[k] = update_obj(obj[k], reference_map) + end + obj end + else obj end - else - obj end - end - #$logger.debug "resolve(0): #{attributes.inspect}" - @attributes.each do |k, v| - next if k == 'id' || k == 'type' - @attributes[k] = update_obj(@attributes[k], reference_map) + # $logger.debug "resolve(0): #{attributes.inspect}" + @attributes.each do |k, _v| + next if %w[id type].include?(k) + + @attributes[k] = update_obj(@attributes[k], reference_map) + end + # $logger.debug "resolve(1): #{attributes.inspect}" + @resolved = true + self end - #$logger.debug "resolve(1): #{attributes.inspect}" - @resolved = true - self - end - ## - # Override this method to implement save using - # an appropriate storage mechanism. - # - # Save the object to the Mongo collection - # use Upsert to create things that don't exist. - # First makes sure that the resource is valid. - # - # @return [Boolean] true or false if resource not saved - def save - raise NotImplementedError - end + ## + # Override this method to implement save using + # an appropriate storage mechanism. + # + # Save the object to the Mongo collection + # use Upsert to create things that don't exist. + # First makes sure that the resource is valid. 
+ # + # @return [Boolean] true or false if resource not saved + def save + raise NotImplementedError + end - ## - # Access individual fields, from subject definition - def property(prop_name); @attributes.fetch(prop_name, nil); end + ## + # Access individual fields, from subject definition + def property(prop_name) + @attributes.fetch(prop_name, nil) + end - ## - # Access individual fields, from subject definition - def method_missing(method, *args) - property(method.to_s) - end + ## + # Access individual fields, from subject definition + def method_missing(method, *_args) + property(method.to_s) + end - def inspect - "" + def inspect + "" + end end end end diff --git a/lib/json/ld/streaming_reader.rb b/lib/json/ld/streaming_reader.rb index 6f60861c..c4cd2d49 100644 --- a/lib/json/ld/streaming_reader.rb +++ b/lib/json/ld/streaming_reader.rb @@ -1,579 +1,646 @@ -# -*- encoding: utf-8 -*- +# frozen_string_literal: true + require 'json/ld' require 'json/ld/expand' require 'json/ld/to_rdf' -module JSON::LD - ## - # A streaming JSON-LD parser in Ruby. - # - # @see http://json-ld.org/spec/ED/20110507/ - # @author [Gregg Kellogg](http://greggkellogg.net/) - module StreamingReader - include Utils - include JSON::LD::ToRDF # For value object conversion - - # The base URI to use when resolving relative URIs - # @return [RDF::URI] - attr_reader :base - attr_reader :namer +module JSON + module LD + ## + # A streaming JSON-LD parser in Ruby. 
+ # + # @see http://json-ld.org/spec/ED/20110507/ + # @author [Gregg Kellogg](http://greggkellogg.net/) + module StreamingReader + include Utils + include JSON::LD::ToRDF # For value object conversion + + # The base URI to use when resolving relative URIs + # @return [RDF::URI] + attr_reader :base + attr_reader :namer + + def self.format + JSON::LD::Format + end - def self.format; JSON::LD::Format; end + ## + # @see RDF::Reader#each_statement + def stream_statement + unique_bnodes = @options[:unique_bnodes] + rename_bnodes = @options.fetch(:rename_bnodes, true) + # FIXME: document loader doesn't stream + @base = RDF::URI(@options[:base] || base_uri) + mj_opts = @options.keep_if { |k, v| k != :adapter || MUTLI_JSON_ADAPTERS.include?(v) } + value = MultiJson.load(@doc, mj_opts) + context_ref = @options[:expandContext] + # context_ref = @options.fetch(:expandContext, remote_doc.contextUrl) + context = Context.parse(context_ref, **@options) + + @namer = if unique_bnodes + BlankNodeUniqer.new + else + (rename_bnodes ? BlankNodeNamer.new("b") : BlankNodeMapper.new) + end + # Namer for naming provisional nodes, which may be determined later to be actual + @provisional_namer = BlankNodeNamer.new("p") - ## - # @see RDF::Reader#each_statement - def stream_statement(&block) - unique_bnodes, rename_bnodes = @options[:unique_bnodes], @options.fetch(:rename_bnodes, true) - # FIXME: document loader doesn't stream - @base = RDF::URI(@options[:base] || base_uri) - mj_opts = @options.keep_if {|k,v| k != :adapter || MUTLI_JSON_ADAPTERS.include?(v)} - value = MultiJson.load(@doc, mj_opts) - context_ref = @options[:expandContext] - #context_ref = @options.fetch(:expandContext, remote_doc.contextUrl) - context = Context.parse(context_ref, **@options) - - @namer = unique_bnodes ? BlankNodeUniqer.new : (rename_bnodes ? 
BlankNodeNamer.new("b") : BlankNodeMapper.new) - # Namer for naming provisional nodes, which may be determined later to be actual - @provisional_namer = BlankNodeNamer.new("p") - - parse_object(value, nil, context, graph_is_named: false) do |st| - # Only output reasonably valid triples - if st.to_a.all? {|r| r.is_a?(RDF::Term) && (r.uri? ? r.valid? : true)} - block.call(st) + parse_object(value, nil, context, graph_is_named: false) do |st| + # Only output reasonably valid triples + yield(st) if st.to_a.all? { |r| r.is_a?(RDF::Term) && (r.uri? ? r.valid? : true) } end + rescue ::JSON::ParserError, ::JSON::LD::JsonLdError => e + log_fatal("Failed to parse input document: #{e.message}", exception: RDF::ReaderError) end - rescue ::JSON::ParserError, ::JSON::LD::JsonLdError => e - log_fatal("Failed to parse input document: #{e.message}", exception: RDF::ReaderError) - end - private + private + + # Parse a node object, or array of node objects + # + # @param [Array, Hash] input + # @param [String] active_property + # The unexpanded property referencing this object + # @param [Context] context + # @param [RDF::Resource] subject referencing this object + # @param [RDF::URI] predicate the predicate part of the reference + # @param [Boolean] from_map + # Expanding from a map, which could be an `@type` map, so don't clear out context term definitions + # @param [Boolean] graph_is_named + # Use of `@graph` implies a named graph; not true at the top-level. + # @param [RDF::URI] extra_type from a type map + # @param [String] language from a language map + # @param [RDF::Resource] node_id from an id map + # @return [void] + def parse_object(input, active_property, context, + subject: nil, predicate: nil, from_map: false, + extra_type: nil, language: nil, node_id: nil, + graph_is_named: true, &block) + + # Skip predicates that look like a BNode + if predicate.to_s.start_with?('_:') + warn "[DEPRECATION] Blank Node properties deprecated in JSON-LD 1.1." 
+ return + end - # Parse a node object, or array of node objects - # - # @param [Array, Hash] input - # @param [String] active_property - # The unexpanded property referencing this object - # @param [Context] context - # @param [RDF::Resource] subject referencing this object - # @param [RDF::URI] predicate the predicate part of the reference - # @param [Boolean] from_map - # Expanding from a map, which could be an `@type` map, so don't clear out context term definitions - # @param [Boolean] graph_is_named - # Use of `@graph` implies a named graph; not true at the top-level. - # @param [RDF::URI] extra_type from a type map - # @param [String] language from a language map - # @param [RDF::Resource] node_id from an id map - # @return [void] - def parse_object(input, active_property, context, - subject: nil, predicate: nil, from_map: false, - extra_type: nil, language: nil, node_id: nil, - graph_is_named: true, &block) - - # Skip predicates that look like a BNode - if predicate.to_s.start_with?('_:') - warn "[DEPRECATION] Blank Node properties deprecated in JSON-LD 1.1." - return - end + if input.is_a?(Array) + input.each do |e| + parse_object(e, active_property, context, subject: subject, predicate: predicate, from_map: from_map, + &block) + end + return + end - if input.is_a?(Array) - input.each {|e| parse_object(e, active_property, context, subject: subject, predicate: predicate, from_map: from_map, &block)} - return - end + # Note that we haven't parsed an @id key, so have no subject + have_id = false + node_reference = false + is_list_or_set = false + node_id ||= RDF::Node.new(@provisional_namer.get_sym) + # For keeping statements not yet ready to be emitted + provisional_statements = [] + value_object = {} + + # Use a term-specific context, if defined, based on the non-type-scoped context. 
+ if active_property && context.term_definitions[active_property] + property_scoped_context = context.term_definitions[active_property].context + end - # Note that we haven't parsed an @id key, so have no subject - have_id, node_reference, is_list_or_set = false, false, false - node_id ||= RDF::Node.new(@provisional_namer.get_sym) - # For keeping statements not yet ready to be emitted - provisional_statements = [] - value_object = {} - - # Use a term-specific context, if defined, based on the non-type-scoped context. - property_scoped_context = context.term_definitions[active_property].context if active_property && context.term_definitions[active_property] - - # Revert any previously type-scoped term definitions, unless this is from a map, a value object or a subject reference - # FIXME - if input.is_a?(Hash) && context.previous_context - expanded_key_map = input.keys.inject({}) do |memo, key| - memo.merge(key => context.expand_iri(key, vocab: true, as_string: true, base: base)) + # Revert any previously type-scoped term definitions, unless this is from a map, a value object or a subject reference + # FIXME + if input.is_a?(Hash) && context.previous_context + expanded_key_map = input.keys.inject({}) do |memo, key| + memo.merge(key => context.expand_iri(key, vocab: true, as_string: true, base: base)) + end + revert_context = !from_map && + !expanded_key_map.value?('@value') && + expanded_key_map.values != ['@id'] + context = context.previous_context if revert_context end - revert_context = !from_map && - !expanded_key_map.values.include?('@value') && - !(expanded_key_map.values == ['@id']) - context = context.previous_context if revert_context - end - # Apply property-scoped context after reverting term-scoped context - context = context.parse(property_scoped_context, base: base, override_protected: true) unless - property_scoped_context.nil? 
+ # Apply property-scoped context after reverting term-scoped context + context = context.parse(property_scoped_context, base: base, override_protected: true) unless + property_scoped_context.nil? - # Otherwise, unless the value is a number, expand the value according to the Value Expansion rules, passing active property. - unless input.is_a?(Hash) - input = context.expand_value(active_property, input, base: base) - end + # Otherwise, unless the value is a number, expand the value according to the Value Expansion rules, passing active property. + input = context.expand_value(active_property, input, base: base) unless input.is_a?(Hash) - # Output any type provided from a type map - provisional_statements << RDF::Statement(node_id, RDF.type, extra_type) if - extra_type + # Output any type provided from a type map + provisional_statements << RDF::Statement(node_id, RDF.type, extra_type) if + extra_type - # Add statement, either provisionally, or just emit - add_statement = Proc.new do |st| - if have_id || st.to_quad.none? 
{|r| r == node_id} - block.call(st) - else - provisional_statements << st + # Add statement, either provisionally, or just emit + add_statement = proc do |st| + if have_id || st.to_quad.none?(node_id) + yield(st) + else + provisional_statements << st + end end - end - # Input is an object (Hash), parse keys in order - state = :await_context - input.each do |key, value| - expanded_key = context.expand_iri(key, base: base, vocab: true) - case expanded_key - when '@context' - raise JsonLdError::InvalidStreamingKeyOrder, - "found #{key} in state #{state}" unless state == :await_context - context = context.parse(value, base: base) - state = :await_type - when '@type' - # Set the type-scoped context to the context on input, for use later - raise JsonLdError::InvalidStreamingKeyOrder, - "found #{key} in state #{state}" unless %i(await_context await_type).include?(state) - - type_scoped_context = context - as_array(value).sort.each do |term| - raise JsonLdError::InvalidTypeValue, - "value of @type must be a string: #{term.inspect}" if !term.is_a?(String) - term_context = type_scoped_context.term_definitions[term].context if type_scoped_context.term_definitions[term] - context = context.parse(term_context, base: base, propagate: false) unless term_context.nil? 
- type = type_scoped_context.expand_iri(term, - base: base, - documentRelative: true, - vocab: true) - - # Early terminate for @json - type = RDF.JSON if type == '@json' - # Add a provisional statement - provisional_statements << RDF::Statement(node_id, RDF.type, type) - end - state = :await_type - when '@id' - raise JsonLdError::InvalidSetOrListObject, - "found #{key} in state #{state}" if is_list_or_set - raise JsonLdError::CollidingKeywords, - "found #{key} in state #{state}" unless %i(await_context await_type await_id).include?(state) + # Input is an object (Hash), parse keys in order + state = :await_context + input.each do |key, value| + expanded_key = context.expand_iri(key, base: base, vocab: true) + case expanded_key + when '@context' + unless state == :await_context + raise JsonLdError::InvalidStreamingKeyOrder, + "found #{key} in state #{state}" + end + context = context.parse(value, base: base) + state = :await_type + when '@type' + # Set the type-scoped context to the context on input, for use later + unless %i[await_context await_type].include?(state) + raise JsonLdError::InvalidStreamingKeyOrder, + "found #{key} in state #{state}" + end - # Set our actual id, and use for replacing any provisional statements using our existing node_id, which is provisional - raise JsonLdError::InvalidIdValue, - "value of @id must be a string: #{value.inspect}" if !value.is_a?(String) + type_scoped_context = context + as_array(value).sort.each do |term| + unless term.is_a?(String) + raise JsonLdError::InvalidTypeValue, + "value of @type must be a string: #{term.inspect}" + end + if type_scoped_context.term_definitions[term] + term_context = type_scoped_context.term_definitions[term].context + end + context = context.parse(term_context, base: base, propagate: false) unless term_context.nil? 
+ type = type_scoped_context.expand_iri(term, + base: base, + documentRelative: true, + vocab: true) + + # Early terminate for @json + type = RDF.JSON if type == '@json' + # Add a provisional statement + provisional_statements << RDF::Statement(node_id, RDF.type, type) + end + state = :await_type + when '@id' + if is_list_or_set + raise JsonLdError::InvalidSetOrListObject, + "found #{key} in state #{state}" + end + unless %i[await_context await_type await_id].include?(state) + raise JsonLdError::CollidingKeywords, + "found #{key} in state #{state}" + end + + # Set our actual id, and use for replacing any provisional statements using our existing node_id, which is provisional + unless value.is_a?(String) + raise JsonLdError::InvalidIdValue, + "value of @id must be a string: #{value.inspect}" + end node_reference = input.keys.length == 1 - expanded_id = context.expand_iri(value, base: base, documentRelative: true) - next if expanded_id.nil? - new_node_id = as_resource(expanded_id) - # Replace and emit any statements including our provisional id with the newly established node (or graph) id - provisional_statements.each do |st| - st.subject = new_node_id if st.subject == node_id - st.object = new_node_id if st.object == node_id - st.graph_name = new_node_id if st.graph_name == node_id - block.call(st) - end + expanded_id = context.expand_iri(value, base: base, documentRelative: true) + next if expanded_id.nil? 
+ + new_node_id = as_resource(expanded_id) + # Replace and emit any statements including our provisional id with the newly established node (or graph) id + provisional_statements.each do |st| + st.subject = new_node_id if st.subject == node_id + st.object = new_node_id if st.object == node_id + st.graph_name = new_node_id if st.graph_name == node_id + yield(st) + end - provisional_statements.clear - have_id, node_id = true, new_node_id + provisional_statements.clear + have_id = true + node_id = new_node_id - # if there's a subject & predicate, emit that statement now - if subject && predicate - st = RDF::Statement(subject, predicate, node_id) - block.call(st) - end - state = :properties - - when '@direction' - raise JsonLdError::InvalidStreamingKeyOrder, - "found @direction in state #{state}" if state == :properties - value_object['@direction'] = value - state = :await_id - when '@graph' - # If `@graph` is at the top level (no `subject`) and value contains no keys other than `@graph` and `@context`, add triples to the default graph - # Process all graph statements - parse_object(value, nil, context) do |st| - # If `@graph` is at the top level (`graph_is_named` is `false`) and input contains no keys other than `@graph` and `@context`, add triples to the default graph - relevant_keys = input.keys - ['@context', key] - st.graph_name = node_id unless !graph_is_named && relevant_keys.empty? - if st.graph_name && !st.graph_name.valid? 
- warn "skipping graph statement within invalid graph name: #{st.inspect}" + # if there's a subject & predicate, emit that statement now + if subject && predicate + st = RDF::Statement(subject, predicate, node_id) + yield(st) + end + state = :properties + + when '@direction' + if state == :properties + raise JsonLdError::InvalidStreamingKeyOrder, + "found @direction in state #{state}" + end + value_object['@direction'] = value + state = :await_id + when '@graph' + # If `@graph` is at the top level (no `subject`) and value contains no keys other than `@graph` and `@context`, add triples to the default graph + # Process all graph statements + parse_object(value, nil, context) do |st| + # If `@graph` is at the top level (`graph_is_named` is `false`) and input contains no keys other than `@graph` and `@context`, add triples to the default graph + relevant_keys = input.keys - ['@context', key] + st.graph_name = node_id unless !graph_is_named && relevant_keys.empty? + if st.graph_name && !st.graph_name.valid? + warn "skipping graph statement within invalid graph name: #{st.inspect}" + else + add_statement.call(st) + end + end + state = :await_id unless state == :properties + when '@included' + # Expanded values must be node objects + have_statements = false + parse_object(value, active_property, context) do |st| + have_statements ||= st.subject? 
+ yield(st) + end + unless have_statements + raise JsonLdError::InvalidIncludedValue, + "values of @included must expand to node objects" + end + + state = :await_id unless state == :properties + when '@index' + state = :await_id unless state == :properties + unless value.is_a?(String) + raise JsonLdError::InvalidIndexValue, + "Value of @index is not a string: #{value.inspect}" + end + when '@language' + if state == :properties + raise JsonLdError::InvalidStreamingKeyOrder, + "found @language in state #{state}" + end + unless value.is_a?(String) + raise JsonLdError::InvalidLanguageTaggedString, + "@language value must be a string: #{value.inspect}" + end + unless /^[a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})*$/.match?(value) + warn "@language must be valid BCP47: #{value.inspect}" + return + end + language = value + state = :await_id + when '@list' + unless %i[await_context await_type await_id].include?(state) + raise JsonLdError::InvalidSetOrListObject, + "found #{key} in state #{state}" + end + is_list_or_set = true + node_id = parse_list(value, active_property, context, &block) if subject + state = :properties + when '@nest' + if context.term_definitions[active_property] + nest_context = context.term_definitions[active_property].context + end + nest_context = if nest_context.nil? + context else - add_statement.call(st) + context.parse(nest_context, base: base, override_protected: true) end - end - state = :await_id unless state == :properties - when '@included' - # Expanded values must be node objects - have_statements = false - parse_object(value, active_property, context) do |st| - have_statements ||= st.subject? 
- block.call(st) - end - raise JsonLdError::InvalidIncludedValue, "values of @included must expand to node objects" unless have_statements - state = :await_id unless state == :properties - when '@index' - state = :await_id unless state == :properties - raise JsonLdError::InvalidIndexValue, - "Value of @index is not a string: #{value.inspect}" unless value.is_a?(String) - when '@language' - raise JsonLdError::InvalidStreamingKeyOrder, - "found @language in state #{state}" if state == :properties - raise JsonLdError::InvalidLanguageTaggedString, - "@language value must be a string: #{value.inspect}" if !value.is_a?(String) - if value !~ /^[a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})*$/ - warn "@language must be valid BCP47: #{value.inspect}" - return - end - language = value - state = :await_id - when '@list' - raise JsonLdError::InvalidSetOrListObject, - "found #{key} in state #{state}" if - !%i(await_context await_type await_id).include?(state) - is_list_or_set = true - if subject - node_id = parse_list(value, active_property, context, &block) - end - state = :properties - when '@nest' - nest_context = context.term_definitions[active_property].context if context.term_definitions[active_property] - nest_context = if nest_context.nil? - context - else - context.parse(nest_context, base: base, override_protected: true) - end - as_array(value).each do |v| - raise JsonLdError::InvalidNestValue, v.inspect unless - v.is_a?(Hash) && v.keys.none? {|k| nest_context.expand_iri(k, vocab: true, base: base) == '@value'} + as_array(value).each do |v| + raise JsonLdError::InvalidNestValue, v.inspect unless + v.is_a?(Hash) && v.keys.none? 
{ |k| nest_context.expand_iri(k, vocab: true, base: base) == '@value' } + parse_object(v, active_property, nest_context, node_id: node_id) do |st| add_statement.call(st) end - end - state = :await_id unless state == :properties - when '@reverse' - as_array(value).each do |item| - item = context.expand_value(active_property, item, base: base) unless item.is_a?(Hash) - raise JsonLdError::InvalidReverseValue, item.inspect if value?(item) - raise JsonLdError::InvalidReversePropertyMap, item.inspect if node_reference?(item) - raise JsonLdError::InvalidReversePropertyValue, item.inspect if list?(item) - has_own_subject = false - parse_object(item, active_property, context, node_id: node_id, predicate: predicate) do |st| - if st.subject == node_id - raise JsonLdError::InvalidReversePropertyValue, item.inspect if !st.object.resource? - # Invert sense of statements - st = RDF::Statement(st.object, st.predicate, st.subject) - has_own_subject = true + end + state = :await_id unless state == :properties + when '@reverse' + as_array(value).each do |item| + item = context.expand_value(active_property, item, base: base) unless item.is_a?(Hash) + raise JsonLdError::InvalidReverseValue, item.inspect if value?(item) + raise JsonLdError::InvalidReversePropertyMap, item.inspect if node_reference?(item) + raise JsonLdError::InvalidReversePropertyValue, item.inspect if list?(item) + + has_own_subject = false + parse_object(item, active_property, context, node_id: node_id, predicate: predicate) do |st| + if st.subject == node_id + raise JsonLdError::InvalidReversePropertyValue, item.inspect unless st.object.resource? 
+ + # Invert sense of statements + st = RDF::Statement(st.object, st.predicate, st.subject) + has_own_subject = true + end + add_statement.call(st) end - add_statement.call(st) + + # If the reversed node does not make any claims on this subject, it's an error + raise JsonLdError::InvalidReversePropertyValue, item.inspect unless has_own_subject + end + state = :await_id unless state == :properties + when '@set' + unless %i[await_context await_type await_id].include?(state) + raise JsonLdError::InvalidSetOrListObject, + "found #{key} in state #{state}" + end + is_list_or_set = true + value = as_array(value).compact + parse_object(value, active_property, context, subject: subject, predicate: predicate, &block) + node_id = nil + state = :properties + when '@value' + if state == :properties + raise JsonLdError::InvalidStreamingKeyOrder, + "found @value in state #{state}" end + value_object['@value'] = value + state = :await_id + else + state = :await_id unless state == :properties + # Skip keys that don't expand to a keyword or absolute IRI + next if expanded_key.is_a?(RDF::URI) && !expanded_key.absolute? 
- # If the reversed node does not make any claims on this subject, it's an error - raise JsonLdError::InvalidReversePropertyValue, item.inspect unless has_own_subject - end - state = :await_id unless state == :properties - when '@set' - raise JsonLdError::InvalidSetOrListObject, - "found #{key} in state #{state}" if - !%i(await_context await_type await_id).include?(state) - is_list_or_set = true - value = as_array(value).compact - parse_object(value, active_property, context, subject: subject, predicate: predicate, &block) - node_id = nil - state = :properties - when '@value' - raise JsonLdError::InvalidStreamingKeyOrder, - "found @value in state #{state}" if state == :properties - value_object['@value'] = value - state = :await_id - else - state = :await_id unless state == :properties - # Skip keys that don't expand to a keyword or absolute IRI - next if expanded_key.is_a?(RDF::URI) && !expanded_key.absolute? - parse_property(value, key, context, node_id, expanded_key) do |st| - add_statement.call(st) + parse_property(value, key, context, node_id, expanded_key) do |st| + add_statement.call(st) + end end end - end - # Value object with @id - raise JsonLdError::InvalidValueObject, - "value object has unknown key: @id" if - !value_object.empty? && (have_id || is_list_or_set) - - # Can't have both @id and either @list or @set - raise JsonLdError::InvalidSetOrListObject, - "found @id with @list or @set" if - have_id && is_list_or_set - - type_statements = provisional_statements.select {|ps| ps.predicate == RDF.type && ps.graph_name.nil?} - value_object['@language'] = (@options[:lowercaseLanguage] ? language.downcase : language) if language - if !value_object.empty? && - (!value_object['@value'].nil? 
|| - (type_statements.first || RDF::Statement.new).object == RDF.JSON) - - # There can be only one value of @type - case type_statements.length - when 0 then #skip - when 1 - raise JsonLdError::InvalidTypedValue, - "value of @type must be an IRI or '@json': #{type_statements.first.object.inspect}" unless - type_statements.first.object.valid? - value_object['@type'] = type_statements.first.object - else + # Value object with @id + if !value_object.empty? && (have_id || is_list_or_set) raise JsonLdError::InvalidValueObject, - "value object must not have more than one type" + "value object has unknown key: @id" + end + + # Can't have both @id and either @list or @set + if have_id && is_list_or_set + raise JsonLdError::InvalidSetOrListObject, + "found @id with @list or @set" end - # Check for extra keys - raise JsonLdError::InvalidValueObject, - "value object has unknown keys: #{value_object.inspect}" unless - (value_object.keys - Expand::KEYS_VALUE_LANGUAGE_TYPE_INDEX_DIRECTION).empty? - - # @type is inconsistent with either @language or @direction - raise JsonLdError::InvalidValueObject, - "value object must not include @type with either " + - "@language or @direction: #{value_object.inspect}" if - value_object.keys.include?('@type') && !(value_object.keys & %w(@language @direction)).empty? - - if value_object.key?('@language') && !value_object['@value'].is_a?(String) - raise JsonLdError::InvalidLanguageTaggedValue, - "with @language @value must be a string: #{value_object.inspect}" - elsif value_object['@type'] && value_object['@type'] != RDF.JSON - raise JsonLdError::InvalidTypedValue, - "value of @type must be an IRI or '@json': #{value_object['@type'].inspect}" unless - value_object['@type'].is_a?(RDF::URI) - elsif value_object['@type'] != RDF.JSON - case value_object['@value'] - when String, TrueClass, FalseClass, Numeric then # okay + type_statements = provisional_statements.select { |ps| ps.predicate == RDF.type && ps.graph_name.nil? 
} + value_object['@language'] = (@options[:lowercaseLanguage] ? language.downcase : language) if language + if !value_object.empty? && + (!value_object['@value'].nil? || + (type_statements.first || RDF::Statement.new).object == RDF.JSON) + + # There can be only one value of @type + case type_statements.length + when 0 # skip + when 1 + unless type_statements.first.object.valid? + raise JsonLdError::InvalidTypedValue, + "value of @type must be an IRI or '@json': #{type_statements.first.object.inspect}" + end + value_object['@type'] = type_statements.first.object else - raise JsonLdError::InvalidValueObjectValue, - "@value is: #{value_object['@value'].inspect}" + raise JsonLdError::InvalidValueObject, + "value object must not have more than one type" end - end - literal = item_to_rdf(value_object, &block) - st = RDF::Statement(subject, predicate, literal) - block.call(st) - elsif !provisional_statements.empty? - # Emit all provisional statements, as no @id was ever found - provisional_statements.each {|st| block.call(st)} - end - # Use implicit subject to generate the relationship - if value_object.empty? && subject && predicate && !have_id && !node_reference - block.call(RDF::Statement(subject, predicate, node_id)) - end - end + # Check for extra keys + unless (value_object.keys - Expand::KEYS_VALUE_LANGUAGE_TYPE_INDEX_DIRECTION).empty? 
+ raise JsonLdError::InvalidValueObject, + "value object has unknown keys: #{value_object.inspect}" + end - def parse_property(input, active_property, context, subject, predicate, &block) - container = context.container(active_property) - if container.include?('@language') && input.is_a?(Hash) - input.each do |lang, lang_value| - expanded_lang = context.expand_iri(lang, vocab: true) - if lang !~ /^[a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})*$/ && expanded_lang != '@none' - warn "@language must be valid BCP47: #{lang.inspect}" + # @type is inconsistent with either @language or @direction + if value_object.key?('@type') && !(value_object.keys & %w[@language @direction]).empty? + raise JsonLdError::InvalidValueObject, + "value object must not include @type with either " \ + "@language or @direction: #{value_object.inspect}" end - as_array(lang_value).each do |item| - raise JsonLdError::InvalidLanguageMapValue, - "Expected #{item.inspect} to be a string" unless item.nil? || item.is_a?(String) - lang_obj = {'@value' => item} - lang_obj['@language'] = lang unless expanded_lang == '@none' - lang_obj['@direction'] = context.direction(lang) if context.direction(lang) - parse_object(lang_obj, active_property, context, subject: subject, predicate: predicate, &block) + if value_object.key?('@language') && !value_object['@value'].is_a?(String) + raise JsonLdError::InvalidLanguageTaggedValue, + "with @language @value must be a string: #{value_object.inspect}" + elsif value_object['@type'] && value_object['@type'] != RDF.JSON + unless value_object['@type'].is_a?(RDF::URI) + raise JsonLdError::InvalidTypedValue, + "value of @type must be an IRI or '@json': #{value_object['@type'].inspect}" + end + elsif value_object['@type'] != RDF.JSON + case value_object['@value'] + when String, TrueClass, FalseClass, Numeric # okay + else + raise JsonLdError::InvalidValueObjectValue, + "@value is: #{value_object['@value'].inspect}" + end end + literal = item_to_rdf(value_object, &block) + st = 
RDF::Statement(subject, predicate, literal) + yield(st) + elsif !provisional_statements.empty? + # Emit all provisional statements, as no @id was ever found + provisional_statements.each(&block) end - elsif container.include?('@list') - # Handle case where value is a list object - if input.is_a?(Hash) && - input.keys.map do |k| - context.expand_iri(k, vocab: true, as_string: true, base: base) - end.include?('@list') - parse_object(input, active_property, context, - subject: subject, predicate: predicate, &block) - else - list = parse_list(input, active_property, context, &block) - block.call(RDF::Statement(subject, predicate, list)) - end - elsif container.intersect?(JSON::LD::Expand::CONTAINER_INDEX_ID_TYPE) && input.is_a?(Hash) - # Get appropriate context for this container - container_context = if container.include?('@type') && context.previous_context - context.previous_context - elsif container.include?('@id') && context.term_definitions[active_property] - id_context = context.term_definitions[active_property].context if context.term_definitions[active_property] - if id_context.nil? - context + + # Use implicit subject to generate the relationship + return unless value_object.empty? && subject && predicate && !have_id && !node_reference + + yield(RDF::Statement(subject, predicate, node_id)) + end + + def parse_property(input, active_property, context, subject, predicate, &block) + container = context.container(active_property) + if container.include?('@language') && input.is_a?(Hash) + input.each do |lang, lang_value| + expanded_lang = context.expand_iri(lang, vocab: true) + if lang !~ /^[a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})*$/ && expanded_lang != '@none' + warn "@language must be valid BCP47: #{lang.inspect}" + end + + as_array(lang_value).each do |item| + unless item.nil? 
|| item.is_a?(String) + raise JsonLdError::InvalidLanguageMapValue, + "Expected #{item.inspect} to be a string" + end + lang_obj = { '@value' => item } + lang_obj['@language'] = lang unless expanded_lang == '@none' + lang_obj['@direction'] = context.direction(lang) if context.direction(lang) + parse_object(lang_obj, active_property, context, subject: subject, predicate: predicate, &block) + end + end + elsif container.include?('@list') + # Handle case where value is a list object + if input.is_a?(Hash) && + input.keys.map do |k| + context.expand_iri(k, vocab: true, as_string: true, base: base) + end.include?('@list') + parse_object(input, active_property, context, + subject: subject, predicate: predicate, &block) else - context.parse(id_context, base: base, propagate: false) + list = parse_list(input, active_property, context, &block) + yield(RDF::Statement(subject, predicate, list)) end - else - context - end - - input.each do |k, v| - # If container mapping in the active context includes @type, and k is a term in the active context having a local context, use that context when expanding values - map_context = container_context.term_definitions[k].context if - container.include?('@type') && container_context.term_definitions[k] - unless map_context.nil? - map_context = container_context.parse(map_context, base: base, propagate: false) + elsif container.intersect?(JSON::LD::Expand::CONTAINER_INDEX_ID_TYPE) && input.is_a?(Hash) + # Get appropriate context for this container + container_context = if container.include?('@type') && context.previous_context + context.previous_context + elsif container.include?('@id') && context.term_definitions[active_property] + id_context = context.term_definitions[active_property].context if context.term_definitions[active_property] + if id_context.nil? 
+ context + else + context.parse(id_context, base: base, propagate: false) + end + else + context end - map_context ||= container_context - - expanded_k = container_context.expand_iri(k, vocab: true, as_string: true, base: base) - index_key = context.term_definitions[active_property].index || '@index' - - case - when container.include?('@index') && container.include?('@graph') - # Index is ignored - as_array(v).each do |item| - # Each value is in a separate graph - graph_name = RDF::Node.new(namer.get_sym) - parse_object(item, active_property, context) do |st| - st.graph_name ||= graph_name - block.call(st) - end - block.call(RDF::Statement(subject, predicate, graph_name)) - # Add a property index, if appropriate - unless index_key == '@index' + input.each do |k, v| + # If container mapping in the active context includes @type, and k is a term in the active context having a local context, use that context when expanding values + map_context = container_context.term_definitions[k].context if + container.include?('@type') && container_context.term_definitions[k] + map_context = container_context.parse(map_context, base: base, propagate: false) unless map_context.nil? + map_context ||= container_context + + expanded_k = container_context.expand_iri(k, vocab: true, as_string: true, base: base) + index_key = context.term_definitions[active_property].index || '@index' + + if container.include?('@index') && container.include?('@graph') + # Index is ignored + as_array(v).each do |item| + # Each value is in a separate graph + graph_name = RDF::Node.new(namer.get_sym) + parse_object(item, active_property, context) do |st| + st.graph_name ||= graph_name + yield(st) + end + yield(RDF::Statement(subject, predicate, graph_name)) + + # Add a property index, if appropriate + next if index_key == '@index' + # Expand key based on term - expanded_k = k == '@none' ? 
- '@none' : + expanded_k = if k == '@none' + '@none' + else container_context.expand_value(index_key, k, base: base) + end # Add the index property as a property of the graph name index_property = container_context.expand_iri(index_key, vocab: true, base: base) - emit_object(expanded_k, index_key, map_context, graph_name, - index_property, from_map: true, &block) unless - expanded_k == '@none' + unless expanded_k == '@none' + emit_object(expanded_k, index_key, map_context, graph_name, + index_property, from_map: true, &block) + end end - end - when container.include?('@index') - if index_key == '@index' - # Index is ignored - emit_object(v, active_property, map_context, subject, predicate, from_map: true, &block) - else - # Expand key based on term - expanded_k = k == '@none' ? - '@none' : - container_context.expand_value(index_key, k, base: base) - - index_property = container_context.expand_iri(index_key, vocab: true, as_string: true, base: base) - - # index_key is a property - as_array(v).each do |item| - item = container_context.expand_value(active_property, item, base: base) if item.is_a?(String) - raise JsonLdError::InvalidValueObject, - "Attempt to add illegal key to value object: #{index_key}" if value?(item) - # add expanded_k as value of index_property in item - item[index_property] = [expanded_k].concat(Array(item[index_property])) unless expanded_k == '@none' - emit_object(item, active_property, map_context, subject, predicate, from_map: true, &block) + elsif container.include?('@index') + if index_key == '@index' + # Index is ignored + emit_object(v, active_property, map_context, subject, predicate, from_map: true, &block) + else + # Expand key based on term + expanded_k = if k == '@none' + '@none' + else + container_context.expand_value(index_key, k, base: base) + end + + index_property = container_context.expand_iri(index_key, vocab: true, as_string: true, base: base) + + # index_key is a property + as_array(v).each do |item| + item = 
container_context.expand_value(active_property, item, base: base) if item.is_a?(String) + if value?(item) + raise JsonLdError::InvalidValueObject, + "Attempt to add illegal key to value object: #{index_key}" + end + # add expanded_k as value of index_property in item + item[index_property] = [expanded_k].concat(Array(item[index_property])) unless expanded_k == '@none' + emit_object(item, active_property, map_context, subject, predicate, from_map: true, &block) + end + end + elsif container.include?('@id') && container.include?('@graph') + graph_name = if expanded_k == '@none' + RDF::Node.new(namer.get_sym) + else + container_context.expand_iri(k, documentRelative: true, base: base) end + parse_object(v, active_property, context) do |st| + st.graph_name ||= graph_name + yield(st) + end + yield(RDF::Statement(subject, predicate, graph_name)) + elsif container.include?('@id') + expanded_k = container_context.expand_iri(k, documentRelative: true, base: base) + # pass our id + emit_object(v, active_property, map_context, subject, predicate, + node_id: (expanded_k unless expanded_k == '@none'), + from_map: true, + &block) + elsif container.include?('@type') + emit_object(v, active_property, map_context, subject, predicate, + from_map: true, + extra_type: as_resource(expanded_k), + &block) end - when container.include?('@id') && container.include?('@graph') - graph_name = expanded_k == '@none' ? 
- RDF::Node.new(namer.get_sym) : - container_context.expand_iri(k, documentRelative: true, base: base) + end + elsif container.include?('@graph') + # Index is ignored + as_array(input).each do |v| + # Each value is in a separate graph + graph_name = RDF::Node.new(namer.get_sym) parse_object(v, active_property, context) do |st| st.graph_name ||= graph_name - block.call(st) - end - block.call(RDF::Statement(subject, predicate, graph_name)) - when container.include?('@id') - expanded_k = container_context.expand_iri(k, documentRelative: true, base: base) - # pass our id - emit_object(v, active_property, map_context, subject, predicate, - node_id: (expanded_k unless expanded_k == '@none'), - from_map: true, - &block) - when container.include?('@type') - emit_object(v, active_property, map_context, subject, predicate, - from_map: true, - extra_type: as_resource(expanded_k), - &block) - end - end - elsif container.include?('@graph') - # Index is ignored - as_array(input).each do |v| - # Each value is in a separate graph - graph_name = RDF::Node.new(namer.get_sym) - parse_object(v, active_property, context) do |st| - st.graph_name ||= graph_name - block.call(st) + yield(st) + end + yield(RDF::Statement(subject, predicate, graph_name)) end - block.call(RDF::Statement(subject, predicate, graph_name)) + else + emit_object(input, active_property, context, subject, predicate, &block) end - else - emit_object(input, active_property, context, subject, predicate, &block) end - end - # Wrapps parse_object to handle JSON literals and reversed properties - def emit_object(input, active_property, context, subject, predicate, **options, &block) - if context.coerce(active_property) == '@json' - parse_object(context.expand_value(active_property, input), active_property, context, - subject: subject, predicate: predicate, **options, &block) - elsif context.reverse?(active_property) - as_array(input).each do |item| - item = context.expand_value(active_property, item, base: base) unless 
item.is_a?(Hash) - raise JsonLdError::InvalidReverseValue, item.inspect if value?(item) - raise JsonLdError::InvalidReversePropertyValue, item.inspect if list?(item) - has_own_subject = false - parse_object(item, active_property, context, subject: subject, predicate: predicate, **options) do |st| - if st.subject == subject - raise JsonLdError::InvalidReversePropertyValue, item.inspect if !st.object.resource? - # Invert sense of statements - st = RDF::Statement(st.object, st.predicate, st.subject) - has_own_subject = true - end - block.call(st) - end + # Wrapps parse_object to handle JSON literals and reversed properties + def emit_object(input, active_property, context, subject, predicate, **options, &block) + if context.coerce(active_property) == '@json' + parse_object(context.expand_value(active_property, input), active_property, context, + subject: subject, predicate: predicate, **options, &block) + elsif context.reverse?(active_property) + as_array(input).each do |item| + item = context.expand_value(active_property, item, base: base) unless item.is_a?(Hash) + raise JsonLdError::InvalidReverseValue, item.inspect if value?(item) + raise JsonLdError::InvalidReversePropertyValue, item.inspect if list?(item) - # If the reversed node does not make any claims on this subject, it's an error - raise JsonLdError::InvalidReversePropertyValue, - "@reverse value must be a node: #{value.inspect}" unless has_own_subject - end - else - as_array(input).flatten.each do |item| - # emit property/value - parse_object(item, active_property, context, - subject: subject, predicate: predicate, **options, &block) + has_own_subject = false + parse_object(item, active_property, context, subject: subject, predicate: predicate, **options) do |st| + if st.subject == subject + raise JsonLdError::InvalidReversePropertyValue, item.inspect unless st.object.resource? 
+ + # Invert sense of statements + st = RDF::Statement(st.object, st.predicate, st.subject) + has_own_subject = true + end + yield(st) + end + + # If the reversed node does not make any claims on this subject, it's an error + unless has_own_subject + raise JsonLdError::InvalidReversePropertyValue, + "@reverse value must be a node: #{value.inspect}" + end + end + else + as_array(input).flatten.each do |item| + # emit property/value + parse_object(item, active_property, context, + subject: subject, predicate: predicate, **options, &block) + end end end - end - # Process input as an ordered list - # @return [RDF::Resource] the list head - def parse_list(input, active_property, context, &block) - # Transform all entries into their values - # this allows us to eliminate those that don't create any statements - fake_subject = RDF::Node.new - values = as_array(input).map do |entry| - if entry.is_a?(Array) - # recursive list - entry_value = parse_list(entry, active_property, context, &block) - else - entry_value = nil - parse_object(entry, active_property, context, subject: fake_subject, predicate: RDF.first) do |st| - if st.subject == fake_subject - entry_value = st.object - else - block.call(st) + # Process input as an ordered list + # @return [RDF::Resource] the list head + def parse_list(input, active_property, context, &block) + # Transform all entries into their values + # this allows us to eliminate those that don't create any statements + fake_subject = RDF::Node.new + values = as_array(input).map do |entry| + if entry.is_a?(Array) + # recursive list + entry_value = parse_list(entry, active_property, context, &block) + else + entry_value = nil + parse_object(entry, active_property, context, subject: fake_subject, predicate: RDF.first) do |st| + if st.subject == fake_subject + entry_value = st.object + else + yield(st) + end end + entry_value end - entry_value - end - end.compact - return RDF.nil if values.empty? + end.compact + return RDF.nil if values.empty? 
- # Construct a list from values, and emit list statements, returning the list subject - list = RDF::List(*values) - list.each_statement(&block) - return list.subject + # Construct a list from values, and emit list statements, returning the list subject + list = RDF::List(*values) + list.each_statement(&block) + list.subject + end end end -end \ No newline at end of file +end diff --git a/lib/json/ld/streaming_writer.rb b/lib/json/ld/streaming_writer.rb index e3238c98..2211729d 100644 --- a/lib/json/ld/streaming_writer.rb +++ b/lib/json/ld/streaming_writer.rb @@ -1,116 +1,117 @@ -# -*- encoding: utf-8 -*- # frozen_string_literal: true -module JSON::LD - ## - # Streaming writer interface. - # - # Writes an array of statements serialized in expanded JSON-LD. No provision for turning rdf:first/rest into @list encodings. - # @author [Gregg Kellogg](http://greggkellogg.net/) - module StreamingWriter - ## - # Write out array start, and note not to prepend node-separating ',' - # @return [void] `self` - def stream_prologue - - # If we were provided a context, or prefixes, use them to compact the output - @context = case @options[:context] - when nil then nil - when Context then @options[:context] - else Context.parse(@options[:context]) - end - - # log_debug("prologue") {"context: #{context.inspect}"} - if context - @output.puts %({"@context": #{context.serialize['@context'].to_json}, "@graph": [) - else - @output.puts "[" - end - self - end +module JSON + module LD ## - # Write a statement, creating a current node definition, if necessary. - # - # Once a new/first statement is seen, terminate the current node definition and compact if provided a context. - # - # Also expects all statements in the same context to be contained in a block including all subjects in a block (except for list elements) + # Streaming writer interface. # - # Note that if list elements are not received in order using the same subject and property, this may cause a bad serialization. 
- # - # @return [void] `self` - def stream_statement(statement) - # log_debug("ss") {"state: #{@state.inspect}, stmt: #{statement}"} - if @current_graph != statement.graph_name - end_graph - start_graph(statement.graph_name) + # Writes an array of statements serialized in expanded JSON-LD. No provision for turning rdf:first/rest into @list encodings. + # @author [Gregg Kellogg](http://greggkellogg.net/) + module StreamingWriter + ## + # Write out array start, and note not to prepend node-separating ',' + # @return [void] `self` + def stream_prologue + # If we were provided a context, or prefixes, use them to compact the output + @context = case @options[:context] + when nil then nil + when Context then @options[:context] + else Context.parse(@options[:context]) + end + + # log_debug("prologue") {"context: #{context.inspect}"} + if context + @output.puts %({"@context": #{context.serialize['@context'].to_json}, "@graph": [) + else + @output.puts "[" + end + self end - # If we're writing a list - @current_node_def ||= {'@id' => statement.subject.to_s} + ## + # Write a statement, creating a current node definition, if necessary. + # + # Once a new/first statement is seen, terminate the current node definition and compact if provided a context. + # + # Also expects all statements in the same context to be contained in a block including all subjects in a block (except for list elements) + # + # Note that if list elements are not received in order using the same subject and property, this may cause a bad serialization. 
+ # + # @return [void] `self` + def stream_statement(statement) + # log_debug("ss") {"state: #{@state.inspect}, stmt: #{statement}"} + if @current_graph != statement.graph_name + end_graph + start_graph(statement.graph_name) + end - if statement.subject.to_s != @current_node_def['@id'] - end_node - @current_node_def = {'@id' => statement.subject.to_s} - end + # If we're writing a list + @current_node_def ||= { '@id' => statement.subject.to_s } - if statement.predicate == RDF.type - (@current_node_def['@type'] ||= []) << statement.object.to_s - else - pd = (@current_node_def[statement.predicate.to_s] ||= []) + if statement.subject.to_s != @current_node_def['@id'] + end_node + @current_node_def = { '@id' => statement.subject.to_s } + end - pd << if statement.object.resource? - {'@id' => statement.object.to_s} - elsif statement.object.datatype == RDF::URI(RDF.to_uri + "JSON") - {"@value" => MultiJson.load(statement.object.to_s), "@type" => "@json"} + if statement.predicate == RDF.type + (@current_node_def['@type'] ||= []) << statement.object.to_s else - lit = {"@value" => statement.object.to_s} - lit["@type"] = statement.object.datatype.to_s if statement.object.datatype? - lit["@language"] = statement.object.language.to_s if statement.object.language? - lit + pd = (@current_node_def[statement.predicate.to_s] ||= []) + + pd << if statement.object.resource? + { '@id' => statement.object.to_s } + elsif statement.object.datatype == RDF_JSON + { "@value" => MultiJson.load(statement.object.to_s), "@type" => "@json" } + else + lit = { "@value" => statement.object.to_s } + lit["@type"] = statement.object.datatype.to_s if statement.object.datatype? + lit["@language"] = statement.object.language.to_s if statement.object.language? 
+ lit + end end + self end - self - end - ## - # Complete open statements - # @return [void] `self` - def stream_epilogue - # log_debug("epilogue") {"state: #{@state.inspect}"} - end_graph - if context - @output.puts "\n]}" - else - @output.puts "\n]" + ## + # Complete open statements + # @return [void] `self` + def stream_epilogue + # log_debug("epilogue") {"state: #{@state.inspect}"} + end_graph + if context + @output.puts "\n]}" + else + @output.puts "\n]" + end + self end - self - end - private - - def start_graph(resource) - # log_debug("start_graph") {"state: #{@state.inspect}, resource: #{resource}"} - if resource - @output.puts(",") if %i(wrote_node wrote_graph).include?(@state) - @output.puts %({"@id": "#{resource}", "@graph": [) - @state = :in_graph + private + + def start_graph(resource) + # log_debug("start_graph") {"state: #{@state.inspect}, resource: #{resource}"} + if resource + @output.puts(",") if %i[wrote_node wrote_graph].include?(@state) + @output.puts %({"@id": "#{resource}", "@graph": [) + @state = :in_graph + end + @current_graph = resource end - @current_graph = resource - end - def end_graph - # log_debug("end_graph") {"state: #{@state.inspect}, ctx: #{@current_graph}"} - end_node - if @current_graph + def end_graph + # log_debug("end_graph") {"state: #{@state.inspect}, ctx: #{@current_graph}"} + end_node + return unless @current_graph + @output.write %(]}) @state = :wrote_graph end - end - def end_node - # log_debug("end_node") {"state: #{@state.inspect}, node: #{@current_node_def.to_json}"} - @output.puts(",") if %i(wrote_node wrote_graph).include?(@state) - if @current_node_def + def end_node + # log_debug("end_node") {"state: #{@state.inspect}, node: #{@current_node_def.to_json}"} + @output.puts(",") if %i[wrote_node wrote_graph].include?(@state) + return unless @current_node_def + node_def = if context compacted = JSON::LD::API.compact(@current_node_def, context, rename_bnodes: false, **@options) compacted.delete('@context') diff --git 
a/lib/json/ld/to_rdf.rb b/lib/json/ld/to_rdf.rb index c7172923..b336c5e5 100644 --- a/lib/json/ld/to_rdf.rb +++ b/lib/json/ld/to_rdf.rb @@ -1,194 +1,196 @@ -# -*- encoding: utf-8 -*- # frozen_string_literal: true + require 'rdf' require 'rdf/nquads' require 'json/canonicalization' -module JSON::LD - module ToRDF - include Utils - - ## - # @param [Hash{String => Object}] item - # @param [RDF::Resource] graph_name - # @param [Boolean] quoted emitted triples are quoted triples. - # @yield statement - # @yieldparam [RDF::Statement] statement - # @return RDF::Resource the subject of this item - def item_to_rdf(item, graph_name: nil, quoted: false, &block) - # Just return value object as Term - return unless item - - if value?(item) - value, datatype = item.fetch('@value'), item.fetch('@type', nil) - - datatype = RDF::URI(RDF.to_uri + "JSON") if datatype == '@json' - - case value - when RDF::Value - return value - when TrueClass, FalseClass - # If value is true or false, then set value its canonical lexical form as defined in the section Data Round Tripping. If datatype is null, set it to xsd:boolean. - value = value.to_s - datatype ||= RDF::XSD.boolean.to_s - when Numeric - # Otherwise, if value is a number, then set value to its canonical lexical form as defined in the section Data Round Tripping. If datatype is null, set it to either xsd:integer or xsd:double, depending on if the value contains a fractional and/or an exponential component. - value = if datatype == RDF::URI(RDF.to_uri + "JSON") - value.to_json_c14n - else - # Don't serialize as double if there are no fractional bits - as_double = value.ceil != value || value >= 1e21 || datatype == RDF::XSD.double - lit = if as_double - RDF::Literal::Double.new(value, canonicalize: true) +module JSON + module LD + module ToRDF + include Utils + + ## + # @param [Hash{String => Object}] item + # @param [RDF::Resource] graph_name + # @param [Boolean] quoted emitted triples are quoted triples. 
+ # @yield statement + # @yieldparam [RDF::Statement] statement + # @return RDF::Resource the subject of this item + def item_to_rdf(item, graph_name: nil, quoted: false, &block) + # Just return value object as Term + return unless item + + if value?(item) + value = item.fetch('@value') + datatype = item.fetch('@type', nil) + + datatype = RDF_JSON if datatype == '@json' + + case value + when RDF::Value + return value + when TrueClass, FalseClass + # If value is true or false, then set value its canonical lexical form as defined in the section Data Round Tripping. If datatype is null, set it to xsd:boolean. + value = value.to_s + datatype ||= RDF::XSD.boolean.to_s + when Numeric + # Otherwise, if value is a number, then set value to its canonical lexical form as defined in the section Data Round Tripping. If datatype is null, set it to either xsd:integer or xsd:double, depending on if the value contains a fractional and/or an exponential component. + value = if datatype == RDF_JSON + value.to_json_c14n else - RDF::Literal.new(value.numerator, canonicalize: true) + # Don't serialize as double if there are no fractional bits + as_double = value.ceil != value || value >= 1e21 || datatype == RDF::XSD.double + lit = if as_double + RDF::Literal::Double.new(value, canonicalize: true) + else + RDF::Literal.new(value.numerator, canonicalize: true) + end + + datatype ||= lit.datatype + lit.to_s.sub("E+", "E") end - - datatype ||= lit.datatype - lit.to_s.sub("E+", "E") - end - when Array, Hash - # Only valid for rdf:JSON - value = value.to_json_c14n - else - if item.key?('@direction') && @options[:rdfDirection] - # Either serialize using a datatype, or a compound-literal - case @options[:rdfDirection] - when 'i18n-datatype' - datatype = RDF::URI("https://www.w3.org/ns/i18n##{item.fetch('@language', '').downcase}_#{item['@direction']}") - when 'compound-literal' - cl = RDF::Node.new - yield RDF::Statement(cl, RDF.value, item['@value'].to_s) - yield RDF::Statement(cl, RDF.to_uri 
+ 'language', item['@language'].downcase) if item['@language'] - yield RDF::Statement(cl, RDF.to_uri + 'direction', item['@direction']) - return cl + when Array, Hash + # Only valid for rdf:JSON + value = value.to_json_c14n + else + if item.key?('@direction') && @options[:rdfDirection] + # Either serialize using a datatype, or a compound-literal + case @options[:rdfDirection] + when 'i18n-datatype' + datatype = RDF::URI("https://www.w3.org/ns/i18n##{item.fetch('@language', + '').downcase}_#{item['@direction']}") + when 'compound-literal' + cl = RDF::Node.new + yield RDF::Statement(cl, RDF.value, item['@value'].to_s) + yield RDF::Statement(cl, RDF_LANGUAGE, item['@language'].downcase) if item['@language'] + yield RDF::Statement(cl, RDF_DIRECTION, item['@direction']) + return cl + end end - end - # Otherwise, if datatype is null, set it to xsd:string or xsd:langString, depending on if item has a @language key. - datatype ||= item.key?('@language') ? RDF.langString : RDF::XSD.string - if datatype == RDF::URI(RDF.to_uri + "JSON") - value = value.to_json_c14n + # Otherwise, if datatype is null, set it to xsd:string or xsd:langString, depending on if item has a @language key. + datatype ||= item.key?('@language') ? RDF.langString : RDF::XSD.string + value = value.to_json_c14n if datatype == RDF_JSON end + datatype = RDF::URI(datatype) if datatype && !datatype.is_a?(RDF::URI) + + # Initialize literal as an RDF literal using value and datatype. If element has the key @language and datatype is xsd:string, then add the value associated with the @language key as the language of the object. + language = item.fetch('@language', nil) if datatype == RDF.langString + return RDF::Literal.new(value, datatype: datatype, language: language) + elsif list?(item) + # If item is a list object, initialize list_results as an empty array, and object to the result of the List Conversion algorithm, passing the value associated with the @list key from item and list_results. 
+ return parse_list(item['@list'], graph_name: graph_name, &block) end - datatype = RDF::URI(datatype) if datatype && !datatype.is_a?(RDF::URI) - - # Initialize literal as an RDF literal using value and datatype. If element has the key @language and datatype is xsd:string, then add the value associated with the @language key as the language of the object. - language = item.fetch('@language', nil) if datatype == RDF.langString - return RDF::Literal.new(value, datatype: datatype, language: language) - elsif list?(item) - # If item is a list object, initialize list_results as an empty array, and object to the result of the List Conversion algorithm, passing the value associated with the @list key from item and list_results. - return parse_list(item['@list'], graph_name: graph_name, &block) - end - subject = case item['@id'] - when nil then node - when String then as_resource(item['@id']) - when Object - # Embedded/quoted statement - # (No error checking, as this is done in expansion) - to_enum(:item_to_rdf, item['@id'], quoted: true).to_a.first - end + subject = case item['@id'] + when nil then node + when String then as_resource(item['@id']) + when Object + # Embedded/quoted statement + # (No error checking, as this is done in expansion) + to_enum(:item_to_rdf, item['@id'], quoted: true).to_a.first + end - # log_debug("item_to_rdf") {"subject: #{subject.to_ntriples rescue 'malformed rdf'}"} - item.each do |property, values| - case property - when '@type' - # If property is @type, construct triple as an RDF Triple composed of id, rdf:type, and object from values where id and object are represented either as IRIs or Blank Nodes - values.each do |v| - object = as_resource(v) - # log_debug("item_to_rdf") {"type: #{object.to_ntriples rescue 'malformed rdf'}"} - yield RDF::Statement(subject, RDF.type, object, graph_name: graph_name, quoted: quoted) - end - when '@graph' - values = [values].compact unless values.is_a?(Array) - values.each do |nd| - item_to_rdf(nd, 
graph_name: subject, quoted: quoted, &block) - end - when '@reverse' - raise "Huh?" unless values.is_a?(Hash) - values.each do |prop, vv| - predicate = as_resource(prop) - # log_debug("item_to_rdf") {"@reverse predicate: #{predicate.to_ntriples rescue 'malformed rdf'}"} - # For each item in values - vv.each do |v| - # Item is a node definition. Generate object as the result of the Object Converstion algorithm passing item. - object = item_to_rdf(v, graph_name: graph_name, &block) - # log_debug("item_to_rdf") {"subject: #{object.to_ntriples rescue 'malformed rdf'}"} - # yield subject, prediate, and literal to results. - yield RDF::Statement(object, predicate, subject, graph_name: graph_name, quoted: quoted) + # log_debug("item_to_rdf") {"subject: #{subject.to_ntriples rescue 'malformed rdf'}"} + item.each do |property, values| + case property + when '@type' + # If property is @type, construct triple as an RDF Triple composed of id, rdf:type, and object from values where id and object are represented either as IRIs or Blank Nodes + values.each do |v| + object = as_resource(v) + # log_debug("item_to_rdf") {"type: #{object.to_ntriples rescue 'malformed rdf'}"} + yield RDF::Statement(subject, RDF.type, object, graph_name: graph_name, quoted: quoted) end - end - when '@included' - values.each do |v| - item_to_rdf(v, graph_name: graph_name, &block) - end - when /^@/ - # Otherwise, if @type is any other keyword, skip to the next property-values pair - else - # Otherwise, property is an IRI or Blank Node identifier - # Initialize predicate from property as an IRI or Blank node - predicate = as_resource(property) - # log_debug("item_to_rdf") {"predicate: #{predicate.to_ntriples rescue 'malformed rdf'}"} - - # For each item in values - values.each do |v| - if list?(v) - # log_debug("item_to_rdf") {"list: #{v.inspect}"} - # If item is a list object, initialize list_results as an empty array, and object to the result of the List Conversion algorithm, passing the value 
associated with the @list key from item and list_results. - object = parse_list(v['@list'], graph_name: graph_name, &block) - - # Append a triple composed of subject, prediate, and object to results and add all triples from list_results to results. - yield RDF::Statement(subject, predicate, object, graph_name: graph_name, quoted: quoted) - else - # Otherwise, item is a value object or a node definition. Generate object as the result of the Object Converstion algorithm passing item. - object = item_to_rdf(v, graph_name: graph_name, &block) - # log_debug("item_to_rdf") {"object: #{object.to_ntriples rescue 'malformed rdf'}"} - # yield subject, prediate, and literal to results. + when '@graph' + values = [values].compact unless values.is_a?(Array) + values.each do |nd| + item_to_rdf(nd, graph_name: subject, quoted: quoted, &block) + end + when '@reverse' + raise "Huh?" unless values.is_a?(Hash) + + values.each do |prop, vv| + predicate = as_resource(prop) + # log_debug("item_to_rdf") {"@reverse predicate: #{predicate.to_ntriples rescue 'malformed rdf'}"} + # For each item in values + vv.each do |v| + # Item is a node definition. Generate object as the result of the Object Converstion algorithm passing item. + object = item_to_rdf(v, graph_name: graph_name, &block) + # log_debug("item_to_rdf") {"subject: #{object.to_ntriples rescue 'malformed rdf'}"} + # yield subject, prediate, and literal to results. 
+ yield RDF::Statement(object, predicate, subject, graph_name: graph_name, quoted: quoted) + end + end + when '@included' + values.each do |v| + item_to_rdf(v, graph_name: graph_name, &block) + end + when /^@/ + # Otherwise, if @type is any other keyword, skip to the next property-values pair + else + # Otherwise, property is an IRI or Blank Node identifier + # Initialize predicate from property as an IRI or Blank node + predicate = as_resource(property) + # log_debug("item_to_rdf") {"predicate: #{predicate.to_ntriples rescue 'malformed rdf'}"} + + # For each item in values + values.each do |v| + if list?(v) + # log_debug("item_to_rdf") {"list: #{v.inspect}"} + # If item is a list object, initialize list_results as an empty array, and object to the result of the List Conversion algorithm, passing the value associated with the @list key from item and list_results. + object = parse_list(v['@list'], graph_name: graph_name, &block) + + # Append a triple composed of subject, prediate, and object to results and add all triples from list_results to results. + else + # Otherwise, item is a value object or a node definition. Generate object as the result of the Object Converstion algorithm passing item. + object = item_to_rdf(v, graph_name: graph_name, &block) + # log_debug("item_to_rdf") {"object: #{object.to_ntriples rescue 'malformed rdf'}"} + # yield subject, prediate, and literal to results. + end yield RDF::Statement(subject, predicate, object, graph_name: graph_name, quoted: quoted) end end end - end - - subject - end - ## - # Parse a List - # - # @param [Array] list - # The Array to serialize as a list - # @yield statement - # @yieldparam [RDF::Resource] statement - # @return [Array] - # Statements for each item in the list - def parse_list(list, graph_name: nil, &block) - # log_debug('parse_list') {"list: #{list.inspect}"} - - last = list.pop - result = first_bnode = last ? 
node : RDF.nil - - list.each do |list_item| - # Set first to the result of the Object Converstion algorithm passing item. - object = item_to_rdf(list_item, graph_name: graph_name, &block) - yield RDF::Statement(first_bnode, RDF.first, object, graph_name: graph_name) - rest_bnode = node - yield RDF::Statement(first_bnode, RDF.rest, rest_bnode, graph_name: graph_name) - first_bnode = rest_bnode + subject end - if last - object = item_to_rdf(last, graph_name: graph_name, &block) - yield RDF::Statement(first_bnode, RDF.first, object, graph_name: graph_name) - yield RDF::Statement(first_bnode, RDF.rest, RDF.nil, graph_name: graph_name) + + ## + # Parse a List + # + # @param [Array] list + # The Array to serialize as a list + # @yield statement + # @yieldparam [RDF::Resource] statement + # @return [Array] + # Statements for each item in the list + def parse_list(list, graph_name: nil, &block) + # log_debug('parse_list') {"list: #{list.inspect}"} + + last = list.pop + result = first_bnode = last ? node : RDF.nil + + list.each do |list_item| + # Set first to the result of the Object Converstion algorithm passing item. 
+ object = item_to_rdf(list_item, graph_name: graph_name, &block) + yield RDF::Statement(first_bnode, RDF.first, object, graph_name: graph_name) + rest_bnode = node + yield RDF::Statement(first_bnode, RDF.rest, rest_bnode, graph_name: graph_name) + first_bnode = rest_bnode + end + if last + object = item_to_rdf(last, graph_name: graph_name, &block) + yield RDF::Statement(first_bnode, RDF.first, object, graph_name: graph_name) + yield RDF::Statement(first_bnode, RDF.rest, RDF.nil, graph_name: graph_name) + end + result end - result - end - ## - # Create a new named node using the sequence - def node - RDF::Node.new(namer.get_sym) + ## + # Create a new named node using the sequence + def node + RDF::Node.new(namer.get_sym) + end end end end diff --git a/lib/json/ld/utils.rb b/lib/json/ld/utils.rb index 8b48effd..c0fd58bb 100644 --- a/lib/json/ld/utils.rb +++ b/lib/json/ld/utils.rb @@ -1,305 +1,311 @@ -# -*- encoding: utf-8 -*- # frozen_string_literal: true -module JSON::LD - module Utils - ## - # Is value a node? A value is a node if - # * it is a Hash - # * it is not a @value, @set or @list - # * it has more than 1 key or any key is not @id - # @param [Object] value - # @return [Boolean] - def node?(value) - value.is_a?(Hash) && - !(value.key?('@value') || value.key?('@list') || value.key?('@set')) && - (value.length > 1 || !value.key?('@id')) - end - ## - # Is value a node reference? - # @param [Object] value - # @return [Boolean] - def node_reference?(value) - value.is_a?(Hash) && value.length == 1 && value.key?('@id') - end +module JSON + module LD + module Utils + ## + # Is value a node? 
A value is a node if + # * it is a Hash + # * it is not a @value, @set or @list + # * it has more than 1 key or any key is not @id + # @param [Object] value + # @return [Boolean] + def node?(value) + value.is_a?(Hash) && + !(value.key?('@value') || value.key?('@list') || value.key?('@set')) && + (value.length > 1 || !value.key?('@id')) + end - ## - # Is value a node or a node reference reference? - # @param [Object] value - # @return [Boolean] - def node_or_ref?(value) - value.is_a?(Hash) && - !(value.key?('@value') || value.key?('@list') || value.key?('@set')) - end + ## + # Is value a node reference? + # @param [Object] value + # @return [Boolean] + def node_reference?(value) + value.is_a?(Hash) && value.length == 1 && value.key?('@id') + end - ## - # Is value a blank node? Value is a blank node - # - # @param [Object] value - # @return [Boolean] - def blank_node?(value) - case value - when nil then true - when String then value.start_with?('_:') - else - (node?(value) || node_reference?(value)) && value.fetch('@id', '_:').start_with?('_:') + ## + # Is value a node or a node reference reference? + # @param [Object] value + # @return [Boolean] + def node_or_ref?(value) + value.is_a?(Hash) && + !(value.key?('@value') || value.key?('@list') || value.key?('@set')) end - end - ## - # Is value an expaned @graph? - # - # Note: A value is a graph if all of these hold true: - # 1. It is an object. - # 2. It has an `@graph` key. - # 3. It may have '@context', '@id' or '@index' - # - # @param [Object] value - # @return [Boolean] - def graph?(value) - value.is_a?(Hash) && (value.keys - UTIL_GRAPH_KEYS) == ['@graph'] - end + ## + # Is value a blank node? 
Value is a blank node + # + # @param [Object] value + # @return [Boolean] + def blank_node?(value) + case value + when nil then true + when String then value.start_with?('_:') + else + (node?(value) || node_reference?(value)) && value.fetch('@id', '_:').start_with?('_:') + end + end - ## - # Is value a simple graph (lacking @id)? - # - # @param [Object] value - # @return [Boolean] - def simple_graph?(value) - graph?(value) && !value.key?('@id') - end - - ## - # Is value an expaned @list? - # - # @param [Object] value - # @return [Boolean] - def list?(value) - value.is_a?(Hash) && value.key?('@list') - end + ## + # Is value an expaned @graph? + # + # Note: A value is a graph if all of these hold true: + # 1. It is an object. + # 2. It has an `@graph` key. + # 3. It may have '@context', '@id' or '@index' + # + # @param [Object] value + # @return [Boolean] + def graph?(value) + value.is_a?(Hash) && (value.keys - UTIL_GRAPH_KEYS) == ['@graph'] + end - ## - # Is value annotated? - # - # @param [Object] value - # @return [Boolean] - def index?(value) - value.is_a?(Hash) && value.key?('@index') - end + ## + # Is value a simple graph (lacking @id)? + # + # @param [Object] value + # @return [Boolean] + def simple_graph?(value) + graph?(value) && !value.key?('@id') + end - ## - # Is value literal? - # - # @param [Object] value - # @return [Boolean] - def value?(value) - value.is_a?(Hash) && value.key?('@value') - end + ## + # Is value an expaned @list? + # + # @param [Object] value + # @return [Boolean] + def list?(value) + value.is_a?(Hash) && value.key?('@list') + end - ## - # Represent an id as an IRI or Blank Node - # @param [String] id - # @param [RDF::URI] base (nil) - # @return [RDF::Resource] - def as_resource(id, base = nil) - @nodes ||= {} # Re-use BNodes - if id.start_with?('_:') - (@nodes[id] ||= RDF::Node.new(namer.get_sym(id))) - elsif base - base.join(id) - else - RDF::URI(id) + ## + # Is value annotated? 
+ # + # @param [Object] value + # @return [Boolean] + def index?(value) + value.is_a?(Hash) && value.key?('@index') end - end - ## - # Represent as an array - # @param [Object] object - # @return [Array] - def as_array(object) - object.is_a?(Array) ? object : [object] - end + ## + # Is value literal? + # + # @param [Object] value + # @return [Boolean] + def value?(value) + value.is_a?(Hash) && value.key?('@value') + end - ## - # Compares two JSON-LD values for equality. Two JSON-LD values will be - # considered equal if: - # - # 1. They are both primitives of the same type and value. - # 2. They are both @values with the same @value, @type, @language, - # and @index, OR - # 3. They both have @ids that are the same. - # - # @param [Object] v1 the first value. - # @param [Object] v2 the second value. - # - # @return [Boolean] v1 and v2 are considered equal - def compare_values(v1, v2) - case - when node_or_ref?(v1) && node_or_ref?(v2) then v1['@id'] && v1['@id'] == v2['@id'] - when value?(v1) && value?(v2) - v1['@value'] == v2['@value'] && - v1['@type'] == v2['@type'] && - v1['@language'] == v2['@language'] && - v1['@index'] == v2['@index'] - else - v1 == v2 + ## + # Represent an id as an IRI or Blank Node + # @param [String] id + # @param [RDF::URI] base (nil) + # @return [RDF::Resource] + def as_resource(id, base = nil) + @nodes ||= {} # Re-use BNodes + if id.start_with?('_:') + (@nodes[id] ||= RDF::Node.new(namer.get_sym(id))) + elsif base + base.join(id) + else + RDF::URI(id) + end + end + + ## + # Represent as an array + # @param [Object] object + # @return [Array] + def as_array(object) + object.is_a?(Array) ? object : [object] end - end - # Adds a value to a subject. If the value is an array, all values in the - # array will be added. - # - # @param [Hash] subject the hash to add the value to. - # @param [String] property the property that relates the value to the subject. - # @param [Object] value the value to add. 
- # @param [Boolean] property_is_array (false) - # true if the property is always an array, false if not. - # @param [Boolean] value_is_array (false) - # true if the value to be added should be preserved as an array (lists) - # @param [Boolean] allow_duplicate (true) - # true to allow duplicates, false not to (uses - # a simple shallow comparison of subject ID or value). - def add_value(subject, property, value, property_is_array: false, value_is_array: false, allow_duplicate: true) - if value_is_array - subject[property] = value - elsif value.is_a?(Array) - subject[property] = [] if value.empty? && property_is_array - value.each do |v| - add_value(subject, property, v, - property_is_array: property_is_array, allow_duplicate: allow_duplicate) + ## + # Compares two JSON-LD values for equality. Two JSON-LD values will be + # considered equal if: + # + # 1. They are both primitives of the same type and value. + # 2. They are both @values with the same @value, @type, @language, + # and @index, OR + # 3. They both have @ids that are the same. + # + # @param [Object] v1 the first value. + # @param [Object] v2 the second value. + # + # @return [Boolean] v1 and v2 are considered equal + def compare_values(v1, v2) + if node_or_ref?(v1) && node_or_ref?(v2) + v1['@id'] && v1['@id'] == v2['@id'] + elsif value?(v1) && value?(v2) + v1['@value'] == v2['@value'] && + v1['@type'] == v2['@type'] && + v1['@language'] == v2['@language'] && + v1['@index'] == v2['@index'] + else + v1 == v2 end - elsif subject[property] - # check if subject already has value if duplicates not allowed - _has_value = !allow_duplicate && has_value?(subject, property, value) + end + + # Adds a value to a subject. If the value is an array, all values in the + # array will be added. + # + # @param [Hash] subject the hash to add the value to. + # @param [String] property the property that relates the value to the subject. + # @param [Object] value the value to add. 
+ # @param [Boolean] property_is_array (false) + # true if the property is always an array, false if not. + # @param [Boolean] value_is_array (false) + # true if the value to be added should be preserved as an array (lists) + # @param [Boolean] allow_duplicate (true) + # true to allow duplicates, false not to (uses + # a simple shallow comparison of subject ID or value). + def add_value(subject, property, value, property_is_array: false, value_is_array: false, allow_duplicate: true) + if value_is_array + subject[property] = value + elsif value.is_a?(Array) + subject[property] = [] if value.empty? && property_is_array + value.each do |v| + add_value(subject, property, v, + property_is_array: property_is_array, allow_duplicate: allow_duplicate) + end + elsif subject[property] + # check if subject already has value if duplicates not allowed + _has_value = !allow_duplicate && has_value?(subject, property, value) - # make property an array if value not present or always an array - if !subject[property].is_a?(Array) && (!_has_value || property_is_array) - subject[property] = [subject[property]] + # make property an array if value not present or always an array + if !subject[property].is_a?(Array) && (!_has_value || property_is_array) + subject[property] = + [subject[property]] + end + subject[property] << value unless _has_value + else + subject[property] = property_is_array ? [value] : value end - subject[property] << value unless _has_value - else - subject[property] = property_is_array ? [value] : value end - end - # Returns True if the given subject has the given property. - # - # @param subject the subject to check. - # @param property the property to look for. - # - # @return [Boolean] true if the subject has the given property, false if not. - def property?(subject, property) - return false unless value = subject[property] - !value.is_a?(Array) || !value.empty? - end + # Returns True if the given subject has the given property. 
+ # + # @param subject the subject to check. + # @param property the property to look for. + # + # @return [Boolean] true if the subject has the given property, false if not. + def property?(subject, property) + return false unless (value = subject[property]) + + !value.is_a?(Array) || !value.empty? + end - # Determines if the given value is a property of the given subject. - # - # @param [Hash] subject the subject to check. - # @param [String] property the property to check. - # @param [Object] value the value to check. - # - # @return [Boolean] true if the value exists, false if not. - def has_value?(subject, property, value) - if property?(subject, property) - val = subject[property] - is_list = list?(val) - if val.is_a?(Array) || is_list - val = val['@list'] if is_list - val.any? {|v| compare_values(value, v)} - elsif !val.is_a?(Array) - compare_values(value, val) + # Determines if the given value is a property of the given subject. + # + # @param [Hash] subject the subject to check. + # @param [String] property the property to check. + # @param [Object] value the value to check. + # + # @return [Boolean] true if the value exists, false if not. + def has_value?(subject, property, value) + if property?(subject, property) + val = subject[property] + is_list = list?(val) + if val.is_a?(Array) || is_list + val = val['@list'] if is_list + val.any? 
{ |v| compare_values(value, v) } + elsif !val.is_a?(Array) + compare_values(value, val) + else + false + end else false end - else - false end - end - private - UTIL_GRAPH_KEYS = %w(@context @id @index).freeze + private + + UTIL_GRAPH_KEYS = %w[@context @id @index].freeze + + # Merge the last value into an array based for the specified key if hash is not null and value is not already in that array + def merge_value(hash, key, value) + return unless hash - # Merge the last value into an array based for the specified key if hash is not null and value is not already in that array - def merge_value(hash, key, value) - return unless hash - values = hash[key] ||= [] - if key == '@list' - values << value - elsif list?(value) - values << value - elsif !values.include?(value) - values << value + values = hash[key] ||= [] + if key == '@list' + values << value + elsif list?(value) + values << value + elsif !values.include?(value) + values << value + end end end - end - ## - # Utility class for mapping old blank node identifiers, or unnamed blank - # nodes to new identifiers - class BlankNodeMapper < Hash ## - # Just return a Blank Node based on `old`. Manufactures - # a node if `old` is nil or empty - # @param [String] old ("") - # @return [String] - def get_sym(old = "") - old = RDF::Node.new.to_s if old.to_s.empty? - old.to_s.sub(/_:/, '') - end + # Utility class for mapping old blank node identifiers, or unnamed blank + # nodes to new identifiers + class BlankNodeMapper < Hash + ## + # Just return a Blank Node based on `old`. Manufactures + # a node if `old` is nil or empty + # @param [String] old ("") + # @return [String] + def get_sym(old = "") + old = RDF::Node.new.to_s if old.to_s.empty? 
+ old.to_s.sub(/_:/, '') + end - ## - # Get a new mapped name for `old` - # - # @param [String] old ("") - # @return [String] - def get_name(old = "") - "_:" + get_sym(old) + ## + # Get a new mapped name for `old` + # + # @param [String] old ("") + # @return [String] + def get_name(old = "") + "_:" + get_sym(old) + end end - end - class BlankNodeUniqer < BlankNodeMapper - ## - # Use the uniquely generated bnodes, rather than a sequence - # @param [String] old ("") - # @return [String] - def get_sym(old = "") - old = old.to_s.sub(/_:/, '') - if old && self.key?(old) - self[old] - elsif !old.empty? - self[old] = RDF::Node.new.to_unique_base[2..-1] - else - RDF::Node.new.to_unique_base[2..-1] + class BlankNodeUniqer < BlankNodeMapper + ## + # Use the uniquely generated bnodes, rather than a sequence + # @param [String] old ("") + # @return [String] + def get_sym(old = "") + old = old.to_s.sub(/_:/, '') + if old && key?(old) + self[old] + elsif !old.empty? + self[old] = RDF::Node.new.to_unique_base[2..] + else + RDF::Node.new.to_unique_base[2..] + end end end - end - class BlankNodeNamer < BlankNodeMapper - # @param [String] prefix - def initialize(prefix) - @prefix = prefix.to_s - @num = 0 - super - end + class BlankNodeNamer < BlankNodeMapper + # @param [String] prefix + def initialize(prefix) + @prefix = prefix.to_s + @num = 0 + super + end - ## - # Get a new symbol mapped from `old` - # @param [String] old ("") - # @return [String] - def get_sym(old = "") - old = old.to_s.sub(/_:/, '') - if !old.empty? && self.key?(old) - self[old] - elsif !old.empty? 
- @num += 1 - #puts "allocate #{@prefix + (@num - 1).to_s} to #{old.inspect}" - self[old] = @prefix + (@num - 1).to_s - else - # Not referenced, just return a new unique value - @num += 1 - #puts "allocate #{@prefix + (@num - 1).to_s} to #{old.inspect}" - @prefix + (@num - 1).to_s + ## + # Get a new symbol mapped from `old` + # @param [String] old ("") + # @return [String] + def get_sym(old = "") + old = old.to_s.sub(/_:/, '') + if !old.empty? && key?(old) + self[old] + elsif !old.empty? + @num += 1 + # puts "allocate #{@prefix + (@num - 1).to_s} to #{old.inspect}" + self[old] = @prefix + (@num - 1).to_s + else + # Not referenced, just return a new unique value + @num += 1 + # puts "allocate #{@prefix + (@num - 1).to_s} to #{old.inspect}" + @prefix + (@num - 1).to_s + end end end end diff --git a/lib/json/ld/version.rb b/lib/json/ld/version.rb index b657e4c7..5a237d23 100644 --- a/lib/json/ld/version.rb +++ b/lib/json/ld/version.rb @@ -1,20 +1,30 @@ -# -*- encoding: utf-8 -*- # frozen_string_literal: true -module JSON::LD::VERSION - VERSION_FILE = File.join(File.expand_path(File.dirname(__FILE__)), "..", "..", "..", "VERSION") - MAJOR, MINOR, TINY, EXTRA = File.read(VERSION_FILE).chomp.split(".") - STRING = [MAJOR, MINOR, TINY, EXTRA].compact.join('.') +module JSON + module LD + module VERSION + VERSION_FILE = File.join(__dir__, "..", "..", "..", "VERSION") + MAJOR, MINOR, TINY, EXTRA = File.read(VERSION_FILE).chomp.split(".") - ## - # @return [String] - def self.to_s() STRING end + STRING = [MAJOR, MINOR, TINY, EXTRA].compact.join('.') - ## - # @return [String] - def self.to_str() STRING end + ## + # @return [String] + def self.to_s + STRING + end - ## - # @return [Array(Integer, Integer, Integer)] - def self.to_a() STRING.split(".") end + ## + # @return [String] + def self.to_str + STRING + end + + ## + # @return [Array(Integer, Integer, Integer)] + def self.to_a + STRING.split(".") + end + end + end end diff --git a/lib/json/ld/writer.rb b/lib/json/ld/writer.rb 
index 096a1d43..c5be5aa1 100644 --- a/lib/json/ld/writer.rb +++ b/lib/json/ld/writer.rb @@ -1,344 +1,367 @@ -# -*- encoding: utf-8 -*- # frozen_string_literal: true + require 'json/ld/streaming_writer' require 'link_header' -module JSON::LD - ## - # A JSON-LD parser in Ruby. - # - # Note that the natural interface is to write a whole graph at a time. - # Writing statements or Triples will create a graph to add them to - # and then serialize the graph. - # - # @example Obtaining a JSON-LD writer class - # RDF::Writer.for(:jsonld) #=> JSON::LD::Writer - # RDF::Writer.for("etc/test.json") - # RDF::Writer.for(:file_name => "etc/test.json") - # RDF::Writer.for(file_extension: "json") - # RDF::Writer.for(:content_type => "application/turtle") - # - # @example Serializing RDF graph into an JSON-LD file - # JSON::LD::Writer.open("etc/test.json") do |writer| - # writer << graph - # end - # - # @example Serializing RDF statements into an JSON-LD file - # JSON::LD::Writer.open("etc/test.json") do |writer| - # graph.each_statement do |statement| - # writer << statement - # end - # end - # - # @example Serializing RDF statements into an JSON-LD string - # JSON::LD::Writer.buffer do |writer| - # graph.each_statement do |statement| - # writer << statement - # end - # end - # - # The writer will add prefix definitions, and use them for creating @context definitions, and minting CURIEs - # - # @example Creating @@context prefix definitions in output - # JSON::LD::Writer.buffer( - # prefixes: { - # nil => "http://example.com/ns#", - # foaf: "http://xmlns.com/foaf/0.1/"} - # ) do |writer| - # graph.each_statement do |statement| - # writer << statement - # end - # end - # - # Select the :expand option to output JSON-LD in expanded form - # - # @see https://www.w3.org/TR/json-ld11-api/ - # @see https://www.w3.org/TR/json-ld11-api/#the-normalization-algorithm - # @author [Gregg Kellogg](http://greggkellogg.net/) - class Writer < RDF::Writer - include StreamingWriter - include Utils - 
include RDF::Util::Logger - format Format - - # @!attribute [r] graph - # @return [RDF::Graph] Graph of statements serialized - attr_reader :graph - - # @!attribute [r] context - # @return [Context] context used to load and administer contexts - attr_reader :context - +module JSON + module LD ## - # JSON-LD Writer options - # @see https://ruby-rdf.github.io/rdf/RDF/Writer#options-class_method - def self.options - super + [ - RDF::CLI::Option.new( - symbol: :compactArrays, - datatype: TrueClass, - default: true, - control: :checkbox, - on: ["--[no-]compact-arrays"], - description: "Replaces arrays with just one element with that element during compaction. Default is `true` use --no-compact-arrays to disable.") {|arg| arg}, - RDF::CLI::Option.new( - symbol: :compactToRelative, - datatype: TrueClass, - default: true, - control: :checkbox, - on: ["--[no-]compact-to-relative"], - description: "Creates document relative IRIs when compacting, if `true`, otherwise leaves expanded. Default is `true` use --no-compact-to-relative to disable.") {|arg| arg}, - RDF::CLI::Option.new( - symbol: :context, - datatype: RDF::URI, - control: :url2, - on: ["--context CONTEXT"], - description: "Context to use when compacting.") {|arg| RDF::URI(arg).absolute? ? RDF::URI(arg) : StringIO.new(File.read(arg))}, - RDF::CLI::Option.new( - symbol: :embed, - datatype: %w(@always @once @never), - default: '@once', - control: :select, - on: ["--embed EMBED"], - description: "How to embed matched objects (@once).") {|arg| RDF::URI(arg)}, - RDF::CLI::Option.new( - symbol: :explicit, - datatype: TrueClass, - control: :checkbox, - on: ["--[no-]explicit"], - description: "Only include explicitly declared properties in output (false)") {|arg| arg}, - RDF::CLI::Option.new( - symbol: :frame, - datatype: RDF::URI, - control: :url2, - use: :required, - on: ["--frame FRAME"], - description: "Frame to use when serializing.") {|arg| RDF::URI(arg).absolute? ? 
RDF::URI(arg) : StringIO.new(File.read(arg))}, - RDF::CLI::Option.new( - symbol: :lowercaseLanguage, - datatype: TrueClass, - control: :checkbox, - on: ["--[no-]lowercase-language"], - description: "By default, language tags are left as is. To normalize to lowercase, set this option to `true`."), - RDF::CLI::Option.new( - symbol: :omitDefault, - datatype: TrueClass, - control: :checkbox, - on: ["--[no-]omitDefault"], - description: "Omit missing properties from output (false)") {|arg| arg}, - RDF::CLI::Option.new( - symbol: :ordered, - datatype: TrueClass, - control: :checkbox, - on: ["--[no-]ordered"], - description: "Order object member processing lexographically.") {|arg| arg}, - RDF::CLI::Option.new( - symbol: :processingMode, - datatype: %w(json-ld-1.0 json-ld-1.1), - control: :radio, - on: ["--processingMode MODE", %w(json-ld-1.0 json-ld-1.1)], - description: "Set Processing Mode (json-ld-1.0 or json-ld-1.1)"), - RDF::CLI::Option.new( - symbol: :rdfDirection, - datatype: %w(i18n-datatype compound-literal), - default: 'null', - control: :select, - on: ["--rdf-direction DIR", %w(i18n-datatype compound-literal)], - description: "How to serialize literal direction (i18n-datatype compound-literal)") {|arg| arg}, - RDF::CLI::Option.new( - symbol: :requireAll, - datatype: TrueClass, - default: true, - control: :checkbox, - on: ["--[no-]require-all"], - description: "Require all properties to match (true). 
Default is `true` use --no-require-all to disable.") {|arg| arg}, - RDF::CLI::Option.new( - symbol: :stream, - datatype: TrueClass, - control: :checkbox, - on: ["--[no-]stream"], - description: "Do not attempt to optimize graph presentation, suitable for streaming large graphs.") {|arg| arg}, - RDF::CLI::Option.new( - symbol: :useNativeTypes, - datatype: TrueClass, - control: :checkbox, - on: ["--[no-]use-native-types"], - description: "Use native JSON values in value objects.") {|arg| arg}, - RDF::CLI::Option.new( - symbol: :useRdfType, - datatype: TrueClass, - control: :checkbox, - on: ["--[no-]use-rdf-type"], - description: "Treat `rdf:type` like a normal property instead of using `@type`.") {|arg| arg}, - ] - end + # A JSON-LD parser in Ruby. + # + # Note that the natural interface is to write a whole graph at a time. + # Writing statements or Triples will create a graph to add them to + # and then serialize the graph. + # + # @example Obtaining a JSON-LD writer class + # RDF::Writer.for(:jsonld) #=> JSON::LD::Writer + # RDF::Writer.for("etc/test.json") + # RDF::Writer.for(:file_name => "etc/test.json") + # RDF::Writer.for(file_extension: "json") + # RDF::Writer.for(:content_type => "application/turtle") + # + # @example Serializing RDF graph into an JSON-LD file + # JSON::LD::Writer.open("etc/test.json") do |writer| + # writer << graph + # end + # + # @example Serializing RDF statements into an JSON-LD file + # JSON::LD::Writer.open("etc/test.json") do |writer| + # graph.each_statement do |statement| + # writer << statement + # end + # end + # + # @example Serializing RDF statements into an JSON-LD string + # JSON::LD::Writer.buffer do |writer| + # graph.each_statement do |statement| + # writer << statement + # end + # end + # + # The writer will add prefix definitions, and use them for creating @context definitions, and minting CURIEs + # + # @example Creating @@context prefix definitions in output + # JSON::LD::Writer.buffer( + # prefixes: { + # nil => 
"http://example.com/ns#", + # foaf: "http://xmlns.com/foaf/0.1/"} + # ) do |writer| + # graph.each_statement do |statement| + # writer << statement + # end + # end + # + # Select the :expand option to output JSON-LD in expanded form + # + # @see https://www.w3.org/TR/json-ld11-api/ + # @see https://www.w3.org/TR/json-ld11-api/#the-normalization-algorithm + # @author [Gregg Kellogg](http://greggkellogg.net/) + class Writer < RDF::Writer + include StreamingWriter + include Utils + include RDF::Util::Logger + format Format + + # @!attribute [r] graph + # @return [RDF::Graph] Graph of statements serialized + attr_reader :graph - class << self - attr_reader :white_list - attr_reader :black_list + # @!attribute [r] context + # @return [Context] context used to load and administer contexts + attr_reader :context ## - # Use parameters from accept-params to determine if the parameters are acceptable to invoke this writer. The `accept_params` will subsequently be provided to the writer instance. - # - # @param [Hash{Symbol => String}] accept_params - # @yield [accept_params] if a block is given, returns the result of evaluating that block - # @yieldparam [Hash{Symbol => String}] accept_params - # @return [Boolean] - # @see http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1 - def accept?(accept_params) - if block_given? - yield(accept_params) - else - true - end + # JSON-LD Writer options + # @see https://ruby-rdf.github.io/rdf/RDF/Writer#options-class_method + def self.options + super + [ + RDF::CLI::Option.new( + symbol: :compactArrays, + datatype: TrueClass, + default: true, + control: :checkbox, + on: ["--[no-]compact-arrays"], + description: "Replaces arrays with just one element with that element during compaction. Default is `true` use --no-compact-arrays to disable." 
+ ) { |arg| arg }, + RDF::CLI::Option.new( + symbol: :compactToRelative, + datatype: TrueClass, + default: true, + control: :checkbox, + on: ["--[no-]compact-to-relative"], + description: "Creates document relative IRIs when compacting, if `true`, otherwise leaves expanded. Default is `true` use --no-compact-to-relative to disable." + ) { |arg| arg }, + RDF::CLI::Option.new( + symbol: :context, + datatype: RDF::URI, + control: :url2, + on: ["--context CONTEXT"], + description: "Context to use when compacting." + ) { |arg| RDF::URI(arg).absolute? ? RDF::URI(arg) : StringIO.new(File.read(arg)) }, + RDF::CLI::Option.new( + symbol: :embed, + datatype: %w[@always @once @never], + default: '@once', + control: :select, + on: ["--embed EMBED"], + description: "How to embed matched objects (@once)." + ) { |arg| RDF::URI(arg) }, + RDF::CLI::Option.new( + symbol: :explicit, + datatype: TrueClass, + control: :checkbox, + on: ["--[no-]explicit"], + description: "Only include explicitly declared properties in output (false)" + ) { |arg| arg }, + RDF::CLI::Option.new( + symbol: :frame, + datatype: RDF::URI, + control: :url2, + use: :required, + on: ["--frame FRAME"], + description: "Frame to use when serializing." + ) { |arg| RDF::URI(arg).absolute? ? RDF::URI(arg) : StringIO.new(File.read(arg)) }, + RDF::CLI::Option.new( + symbol: :lowercaseLanguage, + datatype: TrueClass, + control: :checkbox, + on: ["--[no-]lowercase-language"], + description: "By default, language tags are left as is. To normalize to lowercase, set this option to `true`." + ), + RDF::CLI::Option.new( + symbol: :omitDefault, + datatype: TrueClass, + control: :checkbox, + on: ["--[no-]omitDefault"], + description: "Omit missing properties from output (false)" + ) { |arg| arg }, + RDF::CLI::Option.new( + symbol: :ordered, + datatype: TrueClass, + control: :checkbox, + on: ["--[no-]ordered"], + description: "Order object member processing lexographically." 
+ ) { |arg| arg }, + RDF::CLI::Option.new( + symbol: :processingMode, + datatype: %w[json-ld-1.0 json-ld-1.1], + control: :radio, + on: ["--processingMode MODE", %w[json-ld-1.0 json-ld-1.1]], + description: "Set Processing Mode (json-ld-1.0 or json-ld-1.1)" + ), + RDF::CLI::Option.new( + symbol: :rdfDirection, + datatype: %w[i18n-datatype compound-literal], + default: 'null', + control: :select, + on: ["--rdf-direction DIR", %w[i18n-datatype compound-literal]], + description: "How to serialize literal direction (i18n-datatype compound-literal)" + ) { |arg| arg }, + RDF::CLI::Option.new( + symbol: :requireAll, + datatype: TrueClass, + default: true, + control: :checkbox, + on: ["--[no-]require-all"], + description: "Require all properties to match (true). Default is `true` use --no-require-all to disable." + ) { |arg| arg }, + RDF::CLI::Option.new( + symbol: :stream, + datatype: TrueClass, + control: :checkbox, + on: ["--[no-]stream"], + description: "Do not attempt to optimize graph presentation, suitable for streaming large graphs." + ) { |arg| arg }, + RDF::CLI::Option.new( + symbol: :useNativeTypes, + datatype: TrueClass, + control: :checkbox, + on: ["--[no-]use-native-types"], + description: "Use native JSON values in value objects." + ) { |arg| arg }, + RDF::CLI::Option.new( + symbol: :useRdfType, + datatype: TrueClass, + control: :checkbox, + on: ["--[no-]use-rdf-type"], + description: "Treat `rdf:type` like a normal property instead of using `@type`." 
+ ) { |arg| arg } + ] end - ## - # Returns default context used for compacted profile without an explicit context URL - # @return [String] - def default_context; @default_context || JSON::LD::DEFAULT_CONTEXT; end + class << self + attr_reader :white_list, :black_list - ## - # Sets default context used for compacted profile without an explicit context URL - # @param [String] url - def default_context=(url); @default_context = url; end - end + ## + # Use parameters from accept-params to determine if the parameters are acceptable to invoke this writer. The `accept_params` will subsequently be provided to the writer instance. + # + # @param [Hash{Symbol => String}] accept_params + # @yield [accept_params] if a block is given, returns the result of evaluating that block + # @yieldparam [Hash{Symbol => String}] accept_params + # @return [Boolean] + # @see http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1 + def accept?(accept_params) + if block_given? + yield(accept_params) + else + true + end + end - ## - # Initializes the JSON-LD writer instance. - # - # @param [IO, File] output - # the output stream - # @param [Hash{Symbol => Object}] options - # any additional options - # @option options [Encoding] :encoding (Encoding::UTF_8) - # the encoding to use on the output stream (Ruby 1.9+) - # @option options [Boolean] :canonicalize (false) - # whether to canonicalize literals when serializing - # @option options [Hash] :prefixes ({}) - # the prefix mappings to use (not supported by all writers) - # @option options [Boolean] :standard_prefixes (false) - # Add standard prefixes to @prefixes, if necessary. - # @option options [IO, Array, Hash, String, Context] :context ({}) - # context to use when serializing. Constructed context for native serialization. - # @option options [IO, Array, Hash, String, Context] :frame ({}) - # frame to use when serializing. 
- # @option options [Boolean] :unique_bnodes (false) - # Use unique bnode identifiers, defaults to using the identifier which the node was originall initialized with (if any). - # @option options [Proc] serializer (JSON::LD::API.serializer) - # A Serializer method used for generating the JSON serialization of the result. - # @option options [Boolean] :stream (false) - # Do not attempt to optimize graph presentation, suitable for streaming large graphs. - # @yield [writer] `self` - # @yieldparam [RDF::Writer] writer - # @yieldreturn [void] - # @yield [writer] - # @yieldparam [RDF::Writer] writer - def initialize(output = $stdout, **options, &block) - options[:base_uri] ||= options[:base] if options.key?(:base) - options[:base] ||= options[:base_uri] if options.key?(:base_uri) - @serializer = options.fetch(:serializer, JSON::LD::API.method(:serializer)) - super do - @repo = RDF::Repository.new + ## + # Returns default context used for compacted profile without an explicit context URL + # @return [String] + def default_context + @default_context || JSON::LD::DEFAULT_CONTEXT + end + + ## + # Sets default context used for compacted profile without an explicit context URL + # @param [String] url + attr_writer :default_context + end - if block_given? - case block.arity + ## + # Initializes the JSON-LD writer instance. + # + # @param [IO, File] output + # the output stream + # @param [Hash{Symbol => Object}] options + # any additional options + # @option options [Encoding] :encoding (Encoding::UTF_8) + # the encoding to use on the output stream (Ruby 1.9+) + # @option options [Boolean] :canonicalize (false) + # whether to canonicalize literals when serializing + # @option options [Hash] :prefixes ({}) + # the prefix mappings to use (not supported by all writers) + # @option options [Boolean] :standard_prefixes (false) + # Add standard prefixes to @prefixes, if necessary. 
+ # @option options [IO, Array, Hash, String, Context] :context ({}) + # context to use when serializing. Constructed context for native serialization. + # @option options [IO, Array, Hash, String, Context] :frame ({}) + # frame to use when serializing. + # @option options [Boolean] :unique_bnodes (false) + # Use unique bnode identifiers, defaults to using the identifier which the node was originall initialized with (if any). + # @option options [Proc] serializer (JSON::LD::API.serializer) + # A Serializer method used for generating the JSON serialization of the result. + # @option options [Boolean] :stream (false) + # Do not attempt to optimize graph presentation, suitable for streaming large graphs. + # @yield [writer] `self` + # @yieldparam [RDF::Writer] writer + # @yieldreturn [void] + # @yield [writer] + # @yieldparam [RDF::Writer] writer + def initialize(output = $stdout, **options, &block) + options[:base_uri] ||= options[:base] if options.key?(:base) + options[:base] ||= options[:base_uri] if options.key?(:base_uri) + @serializer = options.fetch(:serializer, JSON::LD::API.method(:serializer)) + super do + @repo = RDF::Repository.new + + if block + case block.arity when 0 then instance_eval(&block) - else block.call(self) + else yield(self) + end end end end - end - ## - # Addes a triple to be serialized - # @param [RDF::Resource] subject - # @param [RDF::URI] predicate - # @param [RDF::Value] object - # @return [void] - # @abstract - def write_triple(subject, predicate, object) - write_quad(subject, predicate, object, nil) - end + ## + # Addes a triple to be serialized + # @param [RDF::Resource] subject + # @param [RDF::URI] predicate + # @param [RDF::Value] object + # @return [void] + # @abstract + def write_triple(subject, predicate, object) + write_quad(subject, predicate, object, nil) + end - ## - # Outputs the N-Quads representation of a statement. 
- # - # @param [RDF::Resource] subject - # @param [RDF::URI] predicate - # @param [RDF::Term] object - # @return [void] - def write_quad(subject, predicate, object, graph_name) - statement = RDF::Statement.new(subject, predicate, object, graph_name: graph_name) - if @options[:stream] - stream_statement(statement) - else - @repo.insert(statement) + ## + # Outputs the N-Quads representation of a statement. + # + # @param [RDF::Resource] subject + # @param [RDF::URI] predicate + # @param [RDF::Term] object + # @return [void] + def write_quad(subject, predicate, object, graph_name) + statement = RDF::Statement.new(subject, predicate, object, graph_name: graph_name) + if @options[:stream] + stream_statement(statement) + else + @repo.insert(statement) + end end - end - ## - # Necessary for streaming - # @return [void] `self` - def write_prologue - stream_prologue if @options[:stream] - super - end + ## + # Necessary for streaming + # @return [void] `self` + def write_prologue + stream_prologue if @options[:stream] + super + end - ## - # Outputs the Serialized JSON-LD representation of all stored statements. - # - # If provided a context or prefixes, we'll create a context - # and use it to compact the output. Otherwise, we return un-compacted JSON-LD - # - # @return [void] - # @see #write_triple - def write_epilogue - if @options[:stream] - stream_epilogue - else + ## + # Outputs the Serialized JSON-LD representation of all stored statements. + # + # If provided a context or prefixes, we'll create a context + # and use it to compact the output. 
Otherwise, we return un-compacted JSON-LD + # + # @return [void] + # @see #write_triple + def write_epilogue + if @options[:stream] + stream_epilogue + else - # log_debug("writer") { "serialize #{@repo.count} statements, #{@options.inspect}"} - result = API.fromRdf(@repo, **@options.merge(serializer: nil)) + # log_debug("writer") { "serialize #{@repo.count} statements, #{@options.inspect}"} + result = API.fromRdf(@repo, **@options.merge(serializer: nil)) - # Some options may be indicated from accept parameters - profile = @options.fetch(:accept_params, {}).fetch(:profile, "").split(' ') - links = LinkHeader.parse(@options[:link]) - @options[:context] ||= links.find_link(['rel', JSON_LD_NS+"context"]).href rescue nil - @options[:context] ||= Writer.default_context if profile.include?(JSON_LD_NS+"compacted") - @options[:frame] ||= links.find_link(['rel', JSON_LD_NS+"frame"]).href rescue nil + # Some options may be indicated from accept parameters + profile = @options.fetch(:accept_params, {}).fetch(:profile, "").split + links = LinkHeader.parse(@options[:link]) + @options[:context] ||= begin + links.find_link(['rel', JSON_LD_NS + "context"]).href + rescue StandardError + nil + end + @options[:context] ||= Writer.default_context if profile.include?(JSON_LD_NS + "compacted") + @options[:frame] ||= begin + links.find_link(['rel', JSON_LD_NS + "frame"]).href + rescue StandardError + nil + end - # If we were provided a context, or prefixes, use them to compact the output - context = @options[:context] - context ||= if @options[:prefixes] || @options[:language] || @options[:standard_prefixes] - ctx = Context.new(**@options) - ctx.language = @options[:language] if @options[:language] - @options[:prefixes].each do |prefix, iri| - ctx.set_mapping(prefix, iri) if prefix && iri - end if @options[:prefixes] - ctx - end + # If we were provided a context, or prefixes, use them to compact the output + context = @options[:context] + context ||= if @options[:prefixes] || 
@options[:language] || @options[:standard_prefixes] + ctx = Context.new(**@options) + ctx.language = @options[:language] if @options[:language] + @options[:prefixes]&.each do |prefix, iri| + ctx.set_mapping(prefix, iri) if prefix && iri + end + ctx + end - # Rename BNodes to uniquify them, if necessary - if options[:unique_bnodes] - result = API.flatten(result, context, **@options.merge(serializer: nil)) - end + # Rename BNodes to uniquify them, if necessary + result = API.flatten(result, context, **@options.merge(serializer: nil)) if options[:unique_bnodes] - if frame = @options[:frame] - # Perform framing, if given a frame - # log_debug("writer") { "frame result"} - result = API.frame(result, frame, **@options.merge(serializer: nil)) - elsif context - # Perform compaction, if we have a context - # log_debug("writer") { "compact result"} - result = API.compact(result, context, **@options.merge(serializer: nil)) + if (frame = @options[:frame]) + # Perform framing, if given a frame + # log_debug("writer") { "frame result"} + result = API.frame(result, frame, **@options.merge(serializer: nil)) + elsif context + # Perform compaction, if we have a context + # log_debug("writer") { "compact result"} + result = API.compact(result, context, **@options.merge(serializer: nil)) + end + + @output.write(@serializer.call(result, **@options)) end - @output.write(@serializer.call(result, **@options)) + super end - - super end end end - diff --git a/script/gen_context b/script/gen_context index 0a80b8d2..93b285de 100755 --- a/script/gen_context +++ b/script/gen_context @@ -80,7 +80,7 @@ end # Load vocabulary graph = RDF::Graph.load(ARGV[0]) -context = JSON::LD::Context.new(options). +context = JSON::LD::Context.new(**options). parse(base_context). 
from_vocabulary(graph) diff --git a/spec/api_spec.rb b/spec/api_spec.rb index 62690226..6da625be 100644 --- a/spec/api_spec.rb +++ b/spec/api_spec.rb @@ -1,37 +1,38 @@ +# frozen_string_literal: true -# coding: utf-8 require_relative 'spec_helper' describe JSON::LD::API do - let(:logger) {RDF::Spec.logger} - before {JSON::LD::Context::PRELOADED.clear} + let(:logger) { RDF::Spec.logger } + + before { JSON::LD::Context::PRELOADED.clear } describe "#initialize" do context "with string input" do let(:context) do - JSON::LD::API::RemoteDocument.new(%q({ + JSON::LD::API::RemoteDocument.new('{ "@context": { "xsd": "http://www.w3.org/2001/XMLSchema#", "name": "http://xmlns.com/foaf/0.1/name", "homepage": {"@id": "http://xmlns.com/foaf/0.1/homepage", "@type": "@id"}, "avatar": {"@id": "http://xmlns.com/foaf/0.1/avatar", "@type": "@id"} } - }), + }', documentUrl: "http://example.com/context", - contentType: 'application/ld+json' - ) + contentType: 'application/ld+json') end let(:remote_doc) do - JSON::LD::API::RemoteDocument.new(%q({"@id": "", "name": "foo"}), + JSON::LD::API::RemoteDocument.new('{"@id": "", "name": "foo"}', documentUrl: "http://example.com/foo", contentType: 'application/ld+json', - contextUrl: "http://example.com/context" - ) + contextUrl: "http://example.com/context") end it "loads document with loader and loads context" do - expect(described_class).to receive(:documentLoader).with("http://example.com/foo", anything).and_yield(remote_doc) - expect(described_class).to receive(:documentLoader).with("http://example.com/context", anything).and_yield(context) + expect(described_class).to receive(:documentLoader).with("http://example.com/foo", + anything).and_yield(remote_doc) + expect(described_class).to receive(:documentLoader).with("http://example.com/context", + anything).and_yield(context) described_class.new("http://example.com/foo", nil) end end @@ -39,12 +40,15 @@ context "when validating", pending: ("JRuby support for jsonlint" if RUBY_ENGINE == 
"jruby") do it "detects invalid JSON" do - expect {described_class.new(StringIO.new(%({"a": "b", "a": "c"})), nil, validate: true)}.to raise_error(JSON::LD::JsonLdError::LoadingDocumentFailed) + expect do + described_class.new(StringIO.new(%({"a": "b", "a": "c"})), nil, + validate: true) + end.to raise_error(JSON::LD::JsonLdError::LoadingDocumentFailed) end end context "Test Files" do - %i(oj json_gem ok_json yajl).each do |adapter| + %i[oj json_gem ok_json yajl].each do |adapter| context "with MultiJson adapter #{adapter.inspect}" do Dir.glob(File.expand_path(File.join(File.dirname(__FILE__), 'test-files/*-input.*'))) do |filename| test = File.basename(filename).sub(/-input\..*$/, '') @@ -54,43 +58,44 @@ context = filename.sub(/-input\..*$/, '-context.jsonld') expanded = filename.sub(/-input\..*$/, '-expanded.jsonld') ttl = filename.sub(/-input\..*$/, '-rdf.ttl') - - context test, skip: ("Not supported in JRuby" if RUBY_ENGINE == "jruby" && %w(oj yajl).include?(adapter.to_s)) do + + context test, + skip: ("Not supported in JRuby" if RUBY_ENGINE == "jruby" && %w[oj yajl].include?(adapter.to_s)) do around do |example| @file = File.open(filename) case filename when /.jsonld$/ - @file.define_singleton_method(:content_type) {'application/ld+json'} + @file.define_singleton_method(:content_type) { 'application/ld+json' } end if context @ctx_io = File.open(context) case context when /.jsonld$/ - @ctx_io.define_singleton_method(:content_type) {'application/ld+json'} + @ctx_io.define_singleton_method(:content_type) { 'application/ld+json' } end end example.run @file.close - @ctx_io.close if @ctx_io + @ctx_io&.close end if File.exist?(expanded) it "expands" do - options = {logger: logger, adapter: adapter} + options = { logger: logger, adapter: adapter } options[:expandContext] = @ctx_io if context jld = described_class.expand(@file, **options) expect(jld).to produce_jsonld(JSON.parse(File.read(expanded)), logger) end it "expands with serializer" do - options = {logger: 
logger, adapter: adapter} + options = { logger: logger, adapter: adapter } options[:expandContext] = @ctx_io if context - jld = described_class.expand(@file, serializer: JSON::LD::API.method(:serializer), **options) + jld = described_class.expand(@file, serializer: described_class.method(:serializer), **options) expect(jld).to be_a(String) - expect(JSON.load(jld)).to produce_jsonld(JSON.parse(File.read(expanded)), logger) + expect(JSON.parse(jld)).to produce_jsonld(JSON.parse(File.read(expanded)), logger) end end - + if File.exist?(compacted) && File.exist?(context) it "compacts" do jld = described_class.compact(@file, @ctx_io, adapter: adapter, logger: logger) @@ -98,12 +103,13 @@ end it "compacts with serializer" do - jld = described_class.compact(@file, @ctx_io, serializer: JSON::LD::API.method(:serializer), adapter: adapter, logger: logger) + jld = described_class.compact(@file, @ctx_io, serializer: described_class.method(:serializer), + adapter: adapter, logger: logger) expect(jld).to be_a(String) - expect(JSON.load(jld)).to produce_jsonld(JSON.parse(File.read(compacted)), logger) + expect(JSON.parse(jld)).to produce_jsonld(JSON.parse(File.read(compacted)), logger) end end - + if File.exist?(framed) && File.exist?(frame) it "frames" do File.open(frame) do |frame_io| @@ -114,16 +120,20 @@ it "frames with serializer" do File.open(frame) do |frame_io| - jld = described_class.frame(@file, frame_io, serializer: JSON::LD::API.method(:serializer), adapter: adapter, logger: logger) + jld = described_class.frame(@file, frame_io, serializer: described_class.method(:serializer), + adapter: adapter, logger: logger) expect(jld).to be_a(String) - expect(JSON.load(jld)).to produce_jsonld(JSON.parse(File.read(framed)), logger) + expect(JSON.parse(jld)).to produce_jsonld(JSON.parse(File.read(framed)), logger) end end end - it "toRdf" do - expect(RDF::Repository.load(filename, format: :jsonld, adapter: adapter, logger: logger)).to be_equivalent_graph(RDF::Repository.load(ttl), 
logger: logger) - end if File.exist?(ttl) + if File.exist?(ttl) + it "toRdf" do + expect(RDF::Repository.load(filename, format: :jsonld, adapter: adapter, + logger: logger)).to be_equivalent_graph(RDF::Repository.load(ttl), logger: logger) + end + end end end end diff --git a/spec/compact_spec.rb b/spec/compact_spec.rb index 717ce72c..e89382e2 100644 --- a/spec/compact_spec.rb +++ b/spec/compact_spec.rb @@ -1,8 +1,9 @@ -# coding: utf-8 +# frozen_string_literal: true + require_relative 'spec_helper' describe JSON::LD::API do - let(:logger) {RDF::Spec.logger} + let(:logger) { RDF::Spec.logger } describe ".compact" do { @@ -87,7 +88,7 @@ "b": "2012-01-04" }) }, - "@list coercion": { + '@list coercion': { input: %({ "http://example.com/b": {"@list": ["c", "d"]} }), @@ -156,7 +157,7 @@ output: %({ "@id": "http://example.com/", "@type": "#{RDF::RDFS.Resource}" - }), + }) }, "@type with array @id" => { input: %({ @@ -167,7 +168,7 @@ output: %({ "@id": "http://example.com/", "@type": "#{RDF::RDFS.Resource}" - }), + }) }, "default language" => { input: %({ @@ -206,9 +207,9 @@ }, "term5": [ "v5", "plain literal" ] }) - }, + } }.each_pair do |title, params| - it(title) {run_compact(params)} + it(title) { run_compact(params) } end context "keyword aliasing" do @@ -225,7 +226,7 @@ "@type": "#{RDF::RDFS.Resource}" }) }, - "@type": { + '@type': { input: %({ "@type": "http://www.w3.org/2000/01/rdf-schema#Resource", "http://example.org/foo": {"@value": "bar", "@type": "http://example.com/type"} @@ -237,7 +238,7 @@ "http://example.org/foo": {"@value": "bar", "type": "http://example.com/type"} }) }, - "@type with @container: @set": { + '@type with @container: @set': { input: %({ "@type": "http://www.w3.org/2000/01/rdf-schema#Resource", "http://example.org/foo": {"@value": "bar", "@type": "http://example.com/type"} @@ -289,9 +290,9 @@ "@context": {"list": "@list"}, "http://example.org/foo": {"list": ["bar"]} }) - }, + } }.each do |title, params| - it(title) {run_compact(params)} + 
it(title) { run_compact(params) } end end @@ -402,9 +403,9 @@ }, "http://example/t": {"@id": "http://example/id"} }) - }, + } }.each_pair do |title, params| - it(title) {run_compact(params)} + it(title) { run_compact(params) } end end @@ -465,9 +466,9 @@ }), base: "http://example.org/", processingMode: 'json-ld-1.1' - }, + } }.each_pair do |title, params| - it(title) {run_compact(params)} + it(title) { run_compact(params) } end end @@ -532,7 +533,7 @@ }) } }.each_pair do |title, params| - it(title) {run_compact(params)} + it(title) { run_compact(params) } end end @@ -553,27 +554,30 @@ }) } }.each_pair do |title, params| - it(title) {run_compact(params)} + it(title) { run_compact(params) } end end context "context as reference" do let(:remote_doc) do JSON::LD::API::RemoteDocument.new( - %q({"@context": {"b": "http://example.com/b"}}), - documentUrl: "http://example.com/context") + '{"@context": {"b": "http://example.com/b"}}', + documentUrl: "http://example.com/context" + ) end + it "uses referenced context" do JSON::LD::Context.instance_variable_set(:@cache, nil) - input = ::JSON.parse %({ + input = JSON.parse %({ "http://example.com/b": "c" }) - expected = ::JSON.parse %({ + expected = JSON.parse %({ "@context": "http://example.com/context", "b": "c" }) - allow(JSON::LD::API).to receive(:documentLoader).with("http://example.com/context", anything).and_yield(remote_doc) - jld = JSON::LD::API.compact(input, "http://example.com/context", logger: logger, validate: true) + allow(described_class).to receive(:documentLoader).with("http://example.com/context", + anything).and_yield(remote_doc) + jld = described_class.compact(input, "http://example.com/context", logger: logger, validate: true) expect(jld).to produce_jsonld(expected, logger) end end @@ -630,7 +634,7 @@ output: %({ "@context": {"foo": {"@id": "http://example.com/foo", "@container": "@list"}}, "foo": [[]] - }), + }) }, "coerced @list containing a list" => { input: %([{ @@ -642,7 +646,7 @@ output: %({ 
"@context": {"foo": {"@id": "http://example.com/foo", "@container": "@list"}}, "foo": [["baz"]] - }), + }) }, "coerced @list containing an deep list" => { input: %([{ @@ -654,7 +658,7 @@ output: %({ "@context": {"foo": {"@id": "http://example.com/foo", "@container": "@list"}}, "foo": [[["baz"]]] - }), + }) }, "coerced @list containing multiple lists" => { input: %([{ @@ -669,7 +673,7 @@ output: %({ "@context": {"foo": {"@id": "http://example.com/foo", "@container": "@list"}}, "foo": [["a"], ["b"]] - }), + }) }, "coerced @list containing mixed list values" => { input: %([{ @@ -684,16 +688,16 @@ output: %({ "@context": {"foo": {"@id": "http://example.com/foo", "@container": "@list"}}, "foo": [["a"], "b"] - }), - }, + }) + } }.each_pair do |title, params| - it(title) {run_compact(params)} + it(title) { run_compact(params) } end end context "with @type: @json" do { - "true": { + true => { output: %({ "@context": { "@version": 1.1, @@ -701,11 +705,11 @@ }, "e": true }), - input:%( [{ + input: %( [{ "http://example.org/vocab#bool": [{"@value": true, "@type": "@json"}] - }]), + }]) }, - "false": { + false => { output: %({ "@context": { "@version": 1.1, @@ -715,9 +719,9 @@ }), input: %([{ "http://example.org/vocab#bool": [{"@value": false, "@type": "@json"}] - }]), + }]) }, - "double": { + double: { output: %({ "@context": { "@version": 1.1, @@ -727,9 +731,9 @@ }), input: %([{ "http://example.org/vocab#double": [{"@value": 1.23, "@type": "@json"}] - }]), + }]) }, - "double-zero": { + 'double-zero': { output: %({ "@context": { "@version": 1.1, @@ -739,9 +743,9 @@ }), input: %([{ "http://example.org/vocab#double": [{"@value": 0.0e0, "@type": "@json"}] - }]), + }]) }, - "integer": { + integer: { output: %({ "@context": { "@version": 1.1, @@ -751,9 +755,9 @@ }), input: %([{ "http://example.org/vocab#integer": [{"@value": 123, "@type": "@json"}] - }]), + }]) }, - "string": { + string: { input: %([{ "http://example.org/vocab#string": [{ "@value": "string", @@ -768,7 +772,7 @@ 
"e": "string" }) }, - "null": { + null: { input: %([{ "http://example.org/vocab#null": [{ "@value": null, @@ -783,7 +787,7 @@ "e": null }) }, - "object": { + object: { output: %({ "@context": { "@version": 1.1, @@ -793,9 +797,9 @@ }), input: %([{ "http://example.org/vocab#object": [{"@value": {"foo": "bar"}, "@type": "@json"}] - }]), + }]) }, - "array": { + array: { output: %({ "@context": { "@version": 1.1, @@ -805,18 +809,18 @@ }), input: %([{ "http://example.org/vocab#array": [{"@value": [{"foo": "bar"}], "@type": "@json"}] - }]), + }]) }, - "Already expanded object": { + 'Already expanded object': { output: %({ "@context": {"@version": 1.1}, "http://example.org/vocab#object": {"@value": {"foo": "bar"}, "@type": "@json"} }), input: %([{ "http://example.org/vocab#object": [{"@value": {"foo": "bar"}, "@type": "@json"}] - }]), + }]) }, - "Already expanded object with aliased keys": { + 'Already expanded object with aliased keys': { output: %({ "@context": {"@version": 1.1, "value": "@value", "type": "@type", "json": "@json"}, "http://example.org/vocab#object": {"value": {"foo": "bar"}, "type": "json"} @@ -824,9 +828,9 @@ input: %([{ "http://example.org/vocab#object": [{"@value": {"foo": "bar"}, "@type": "@json"}] }]) - }, + } }.each do |title, params| - it(title) {run_compact(processingMode: 'json-ld-1.1', **params)} + it(title) { run_compact(processingMode: 'json-ld-1.1', **params) } end end @@ -954,7 +958,7 @@ }), processingMode: 'json-ld-1.1' }, - "issue-514": { + 'issue-514': { input: %({ "http://example.org/ns/prop": [{ "@id": "http://example.org/ns/bar", @@ -989,7 +993,7 @@ } }) }, - "issue-514b": { + 'issue-514b': { input: %({ "http://example.org/ns/prop": [{ "@id": "http://example.org/ns/bar", @@ -1023,14 +1027,14 @@ "bar": { "@id": "ex:bar"} } }) - }, + } }.each_pair do |title, params| - it(title) {run_compact(params)} + it(title) { run_compact(params) } end context "@index: property" do { - "property-valued index indexes property value, instead of 
property (value)": { + 'property-valued index indexes property value, instead of property (value)': { output: %({ "@context": { "@version": 1.1, @@ -1053,7 +1057,7 @@ ] }]) }, - "property-valued index indexes property value, instead of @index (multiple values)": { + 'property-valued index indexes property value, instead of @index (multiple values)': { output: %({ "@context": { "@version": 1.1, @@ -1079,7 +1083,7 @@ ] }]) }, - "property-valued index extracts property value, instead of @index (node)": { + 'property-valued index extracts property value, instead of @index (node)': { output: %({ "@context": { "@version": 1.1, @@ -1106,7 +1110,7 @@ ] }]) }, - "property-valued index indexes property value, instead of property (multimple nodes)": { + 'property-valued index indexes property value, instead of property (multimple nodes)': { output: %({ "@context": { "@version": 1.1, @@ -1133,7 +1137,7 @@ ] }]) }, - "property-valued index indexes using @none if no property value exists": { + 'property-valued index indexes using @none if no property value exists': { output: %({ "@context": { "@version": 1.1, @@ -1155,7 +1159,7 @@ ] }]) }, - "property-valued index indexes using @none if no property value does not compact to string": { + 'property-valued index indexes using @none if no property value does not compact to string': { output: %({ "@context": { "@version": 1.1, @@ -1182,7 +1186,7 @@ }]) } }.each do |title, params| - it(title) {run_compact(**params)} + it(title) { run_compact(**params) } end end end @@ -1276,7 +1280,7 @@ }), processingMode: "json-ld-1.1" }, - "simple map with term direction": { + 'simple map with term direction': { input: %([ { "@id": "http://example.com/queen", @@ -1316,7 +1320,7 @@ }), processingMode: "json-ld-1.1" }, - "simple map with overriding term direction": { + 'simple map with overriding term direction': { input: %([ { "@id": "http://example.com/queen", @@ -1358,7 +1362,7 @@ }), processingMode: "json-ld-1.1" }, - "simple map with overriding 
null direction": { + 'simple map with overriding null direction': { input: %([ { "@id": "http://example.com/queen", @@ -1400,7 +1404,7 @@ }), processingMode: "json-ld-1.1" }, - "simple map with mismatching term direction": { + 'simple map with mismatching term direction': { input: %([ { "@id": "http://example.com/queen", @@ -1442,9 +1446,9 @@ ] }), processingMode: "json-ld-1.1" - }, + } }.each_pair do |title, params| - it(title) {run_compact(params)} + it(title) { run_compact(params) } end end @@ -1470,7 +1474,7 @@ "http://example.org/foo": {"label": "Object with @id "}, "_:bar": {"label": "Object with @id _:bar"} } - }), + }) }, "Indexes to object already having an @id" => { input: %([{ @@ -1492,7 +1496,7 @@ "_:foo": {"label": "Object with @id _:bar"}, "http://example.org/bar": {"label": "Object with @id "} } - }), + }) }, "Indexes to object using compact IRI @id" => { input: %([{ @@ -1561,9 +1565,9 @@ "none": {"label": "Object with no @id"} } }) - }, + } }.each_pair do |title, params| - it(title) {run_compact({processingMode: "json-ld-1.1"}.merge(params))} + it(title) { run_compact({ processingMode: "json-ld-1.1" }.merge(params)) } end end @@ -1712,9 +1716,9 @@ "none": {"label": "Object with no @id"} } }) - }, + } }.each_pair do |title, params| - it(title) {run_compact({processingMode: "json-ld-1.1"}.merge(params))} + it(title) { run_compact({ processingMode: "json-ld-1.1" }.merge(params)) } end end @@ -1888,7 +1892,7 @@ }) } }.each_pair do |title, params| - it(title) {run_compact({processingMode: "json-ld-1.1"}.merge(params))} + it(title) { run_compact({ processingMode: "json-ld-1.1" }.merge(params)) } end context "+ @index" do @@ -1986,9 +1990,9 @@ "@graph": {"value": "x"} } }) - }, + } }.each_pair do |title, params| - it(title) {run_compact({processingMode: "json-ld-1.1"}.merge(params))} + it(title) { run_compact({ processingMode: "json-ld-1.1" }.merge(params)) } end end @@ -2150,16 +2154,16 @@ "none" : {"value": "x"} } }) - }, + } }.each_pair do |title, 
params| - it(title) {run_compact({processingMode: "json-ld-1.1"}.merge(params))} + it(title) { run_compact({ processingMode: "json-ld-1.1" }.merge(params)) } end end end context "@included" do { - "Basic Included array": { + 'Basic Included array': { output: %({ "@context": { "@version": 1.1, @@ -2178,7 +2182,7 @@ }] }]) }, - "Basic Included object": { + 'Basic Included object': { output: %({ "@context": { "@version": 1.1, @@ -2196,7 +2200,7 @@ }] }]) }, - "Multiple properties mapping to @included are folded together": { + 'Multiple properties mapping to @included are folded together': { output: %({ "@context": { "@version": 1.1, @@ -2216,7 +2220,7 @@ ] }]) }, - "Included containing @included": { + 'Included containing @included': { output: %({ "@context": { "@version": 1.1, @@ -2240,7 +2244,7 @@ }] }]) }, - "Property value with @included": { + 'Property value with @included': { output: %({ "@context": { "@version": 1.1, @@ -2261,9 +2265,9 @@ }] }] }]) - }, + } }.each do |title, params| - it(title) {run_compact(params)} + it(title) { run_compact(params) } end end @@ -2374,7 +2378,7 @@ "nestedlist": { "list": ["a", "b"] } - }), + }) }, "Nested @container: @index" => { input: %([{ @@ -2400,7 +2404,7 @@ "B": "b" } } - }), + }) }, "Nested @container: @language" => { input: %([{ @@ -2512,9 +2516,9 @@ "term": {"@id": "http://example/foo", "@nest": "unknown"} }), exception: JSON::LD::JsonLdError::InvalidNestValue - }, + } }.each_pair do |title, params| - it(title) {run_compact({processingMode: "json-ld-1.1"}.merge(params))} + it(title) { run_compact({ processingMode: "json-ld-1.1" }.merge(params)) } end end @@ -2533,9 +2537,9 @@ {"ex:bar": "bar"} ] }) - }, + } }.each_pair do |title, params| - it(title) {run_compact(params)} + it(title) { run_compact(params) } end end @@ -2579,7 +2583,7 @@ "foo": { "bar": "http://example/baz" } - }), + }) }, "property and value with different terms mapping to the same expanded property" => { input: %([ @@ -2603,7 +2607,7 @@ "foo": { "Bar": 
"baz" } - }), + }) }, "deep @context affects nested nodes" => { input: %([ @@ -2629,7 +2633,7 @@ "baz": "buzz" } } - }), + }) }, "scoped context layers on intemediate contexts" => { input: %([{ @@ -2659,7 +2663,7 @@ "http://example.com/c": "C in example.com" }, "c": "C in example" - }), + }) }, "Raises InvalidTermDefinition if processingMode is 1.0" => { input: %([{ @@ -2673,7 +2677,7 @@ validate: true, exception: JSON::LD::JsonLdError::InvalidTermDefinition }, - "Scoped on id map": { + 'Scoped on id map': { output: %({ "@context": { "@version": 1.1, @@ -2721,9 +2725,9 @@ "http://schema.org/wordCount": [{"@value": 1204}] }] }]) - }, + } }.each_pair do |title, params| - it(title) {run_compact({processingMode: "json-ld-1.1"}.merge(params))} + it(title) { run_compact({ processingMode: "json-ld-1.1" }.merge(params)) } end end @@ -2771,7 +2775,7 @@ "bar": {"@type": "http://www.w3.org/2001/XMLSchema#string"} }, "a": {"@type": "Foo", "bar": "http://example/baz"} - }), + }) }, "alias of @type" => { input: %([ @@ -2794,7 +2798,7 @@ "Foo": {"@context": {"bar": "http://example.org/bar"}} }, "a": {"type": "Foo", "bar": "baz"} - }), + }) }, "deep @context does not affect nested nodes" => { input: %([ @@ -2816,7 +2820,7 @@ }, "@type": "Foo", "bar": {"baz": {"@id": "http://example/buzz"}} - }), + }) }, "scoped context layers on intemediate contexts" => { input: %([{ @@ -2842,7 +2846,7 @@ "http://example.com/a": "A in example.com" }, "c": "C in example" - }), + }) }, "orders lexicographically" => { input: %([{ @@ -2864,7 +2868,7 @@ }, "@type": ["t2", "t1"], "foo": "urn:bar" - }), + }) }, "with @container: @type" => { input: %([{ @@ -2937,10 +2941,10 @@ "@type": "Foo", "bar": {"@id": "http://example.org/baz"} } - }), - }, + }) + } }.each_pair do |title, params| - it(title) {run_compact({processingMode: "json-ld-1.1"}.merge(params))} + it(title) { run_compact({ processingMode: "json-ld-1.1" }.merge(params)) } end end @@ -3035,16 +3039,16 @@ "@context": {"@base": 
"http://example.org/foo/", "@vocab": ""}, "bar": "term" }) - }, + } }.each do |title, params| - it(title) {run_compact(params)} + it(title) { run_compact(params) } end end end context "html" do { - "Compacts embedded JSON-LD script element": { + 'Compacts embedded JSON-LD script element': { input: %( @@ -3066,7 +3070,7 @@ "foo": ["bar"] }) }, - "Compacts first script element": { + 'Compacts first script element': { input: %( @@ -3097,7 +3101,7 @@ "foo": ["bar"] }) }, - "Compacts targeted script element": { + 'Compacts targeted script element': { input: %( @@ -3130,7 +3134,7 @@ }), base: "http://example.org/doc#second" }, - "Compacts all script elements with extractAllScripts option": { + 'Compacts all script elements with extractAllScripts option': { input: %( @@ -3173,11 +3177,11 @@ ] }), extractAllScripts: true - }, + } }.each do |title, params| it(title) do params[:input] = StringIO.new(params[:input]) - params[:input].send(:define_singleton_method, :content_type) {"text/html"} + params[:input].send(:define_singleton_method, :content_type) { "text/html" } run_compact params.merge(validate: true) end end @@ -3185,7 +3189,7 @@ context "JSON-LD-star" do { - "subject-iii": { + 'subject-iii': { input: %([{ "@id": { "@id": "http://example/s1", @@ -3203,7 +3207,7 @@ "ex:p": {"@id": "ex:o"} }) }, - "subject-iib": { + 'subject-iib': { input: %([{ "@id": { "@id": "http://example/s1", @@ -3221,7 +3225,7 @@ "ex:p": {"@id": "ex:o"} }) }, - "subject-iil": { + 'subject-iil': { input: %([{ "@id": { "@id": "http://example/s1", @@ -3239,7 +3243,7 @@ "ex:p": {"@id": "ex:o"} }) }, - "subject-bii": { + 'subject-bii': { input: %([{ "@id": { "@id": "_:s1", @@ -3257,7 +3261,7 @@ "ex:p": {"@id": "ex:o"} }) }, - "subject-bib": { + 'subject-bib': { input: %([{ "@id": { "@id": "_:s1", @@ -3275,7 +3279,7 @@ "ex:p": {"@id": "ex:o"} }) }, - "subject-bil": { + 'subject-bil': { input: %([{ "@id": { "@id": "_:s1", @@ -3293,7 +3297,7 @@ "ex:p": {"@id": "ex:o"} }) }, - "object-iii": { + 
'object-iii': { input: %([{ "@id": "http://example/s", "http://example/p": [{ @@ -3315,7 +3319,7 @@ } }) }, - "object-iib": { + 'object-iib': { input: %([{ "@id": "http://example/s", "http://example/p": [{ @@ -3337,7 +3341,7 @@ } }) }, - "object-iil": { + 'object-iil': { input: %([{ "@id": "http://example/s", "http://example/p": [{ @@ -3359,7 +3363,7 @@ } }) }, - "recursive-subject": { + 'recursive-subject': { input: %([{ "@id": { "@id": { @@ -3382,15 +3386,15 @@ }, "ex:p": {"@id": "ex:o"} }) - }, + } }.each do |name, params| - it(name) {run_compact(params.merge(rdfstar: true))} + it(name) { run_compact(params.merge(rdfstar: true)) } end end context "problem cases" do { - "issue json-ld-framing#64": { + 'issue json-ld-framing#64': { input: %({ "@context": { "@version": 1.1, @@ -3413,7 +3417,7 @@ "Production": { "@context": { "part": { - "@type": "@id", + "@type": "@id", "@container": "@set" } } @@ -3426,7 +3430,7 @@ "Production": { "@context": { "part": { - "@type": "@id", + "@type": "@id", "@container": "@set" } } @@ -3453,22 +3457,26 @@ end def run_compact(params) - input, output, context = params[:input], params[:output], params[:context] + input = params[:input] + output = params[:output] + context = params[:context] params[:base] ||= nil - context ||= output # Since it will have the context - input = ::JSON.parse(input) if input.is_a?(String) - output = ::JSON.parse(output) if output.is_a?(String) - context = ::JSON.parse(context) if context.is_a?(String) + context ||= output # Since it will have the context + input = JSON.parse(input) if input.is_a?(String) + output = JSON.parse(output) if output.is_a?(String) + context = JSON.parse(context) if context.is_a?(String) context = context['@context'] if context.key?('@context') pending params.fetch(:pending, "test implementation") unless input if params[:exception] - expect {JSON::LD::API.compact(input, context, logger: logger, **params)}.to raise_error(params[:exception]) + expect { JSON::LD::API.compact(input, 
context, logger: logger, **params) }.to raise_error(params[:exception]) else jld = nil if params[:write] - expect{jld = JSON::LD::API.compact(input, context, logger: logger, **params)}.to write(params[:write]).to(:error) + expect do + jld = JSON::LD::API.compact(input, context, logger: logger, **params) + end.to write(params[:write]).to(:error) else - expect{jld = JSON::LD::API.compact(input, context, logger: logger, **params)}.not_to write.to(:error) + expect { jld = JSON::LD::API.compact(input, context, logger: logger, **params) }.not_to write.to(:error) end expect(jld).to produce_jsonld(output, logger) diff --git a/spec/conneg_spec.rb b/spec/conneg_spec.rb index 8341f2f0..aecefce6 100644 --- a/spec/conneg_spec.rb +++ b/spec/conneg_spec.rb @@ -1,11 +1,12 @@ -# coding: utf-8 +# frozen_string_literal: true + require_relative 'spec_helper' require 'rack/linkeddata' require 'rack/test' describe JSON::LD::ContentNegotiation do - include ::Rack::Test::Methods - let(:logger) {RDF::Spec.logger} + include Rack::Test::Methods + let(:logger) { RDF::Spec.logger } let(:app) do described_class.new(double("Target Rack Application", :call => [200, {}, @results || "A String"])) @@ -13,9 +14,9 @@ describe "#parse_accept_header" do { - "application/n-triples, application/ld+json;q=0.5" => %w(application/ld+json), + "application/n-triples, application/ld+json;q=0.5" => %w[application/ld+json], "application/ld+json, application/ld+json;profile=http://www.w3.org/ns/json-ld#compacted" => - %w(application/ld+json;profile=http://www.w3.org/ns/json-ld#compacted application/ld+json), + %w[application/ld+json;profile=http://www.w3.org/ns/json-ld#compacted application/ld+json] }.each do |accept, content_types| it "returns #{content_types.inspect} given #{accept.inspect}" do expect(app.send(:parse_accept_header, accept)).to eq content_types @@ -39,17 +40,17 @@ end describe "#call" do - let(:schema_context) { - JSON::LD::API::RemoteDocument.new(%q({ + let(:schema_context) do + 
JSON::LD::API::RemoteDocument.new('{ "@context": { "@vocab": "http://schema.org/", "id": "@id", "type": "@type" } - }), documentUrl: "http://schema.org") - } - let(:frame) { - JSON::LD::API::RemoteDocument.new(%q({ + }', documentUrl: "http://schema.org") + end + let(:frame) do + JSON::LD::API::RemoteDocument.new('{ "@context": { "dc": "http://purl.org/dc/elements/1.1/", "ex": "http://example.org/vocab#" @@ -61,21 +62,23 @@ "@type": "ex:Chapter" } } - }), documentUrl: "http://conneg.example.com/frame") - } - let(:context) { - JSON::LD::API::RemoteDocument.new(%q({ + }', documentUrl: "http://conneg.example.com/frame") + end + let(:context) do + JSON::LD::API::RemoteDocument.new('{ "@context": { "dc": "http://purl.org/dc/elements/1.1/", "ex": "http://example.org/vocab#" } - }), documentUrl: "http://conneg.example.com/context") - } + }', documentUrl: "http://conneg.example.com/context") + end - before(:each) do + before do allow(JSON::LD::API).to receive(:documentLoader).with("http://schema.org", any_args).and_yield(schema_context) - allow(JSON::LD::API).to receive(:documentLoader).with("http://conneg.example.com/context", any_args).and_yield(context) - allow(JSON::LD::API).to receive(:documentLoader).with("http://conneg.example.com/frame", any_args).and_yield(frame) + allow(JSON::LD::API).to receive(:documentLoader).with("http://conneg.example.com/context", + any_args).and_yield(context) + allow(JSON::LD::API).to receive(:documentLoader).with("http://conneg.example.com/frame", + any_args).and_yield(frame) end context "with text result" do @@ -86,7 +89,7 @@ end context "with object result" do - before(:each) do + before do @results = LIBRARY_INPUT end @@ -97,9 +100,9 @@ context "with Accept" do { - "application/n-triples" => "406 Not Acceptable (No appropriate combinaion of media-type and parameters found)\n", - "application/json" => LIBRARY_INPUT, - "application/ld+json" => LIBRARY_INPUT, + "application/n-triples" => "406 Not Acceptable (No appropriate combinaion of 
media-type and parameters found)\n", + "application/json" => LIBRARY_INPUT, + "application/ld+json" => LIBRARY_INPUT, %(application/ld+json;profile=http://www.w3.org/ns/json-ld#expanded) => LIBRARY_EXPANDED, @@ -127,15 +130,15 @@ %(application/ld+json;profile="http://www.w3.org/ns/json-ld#framed http://www.w3.org/ns/json-ld#compacted") => "406 Not Acceptable (framed profile without a frame)\n", %(application/ld+json;profile="http://www.w3.org/ns/json-ld#compacted http://www.w3.org/ns/json-ld#framed") => - "406 Not Acceptable (framed profile without a frame)\n", + "406 Not Acceptable (framed profile without a frame)\n" }.each do |accepts, result| context accepts do - before(:each) do - get '/', {}, {"HTTP_ACCEPT" => accepts} + before do + get '/', {}, { "HTTP_ACCEPT" => accepts } end it "status" do - expect(last_response.status).to satisfy("be 200 or 406") {|x| [200, 406].include?(x)} + expect(last_response.status).to satisfy("be 200 or 406") { |x| [200, 406].include?(x) } end it "sets content type" do @@ -185,15 +188,15 @@ accept: %(application/ld+json;profile=http://www.w3.org/ns/json-ld#framed), link: %( rel="http://www.w3.org/ns/json-ld#context"), result: "406 Not Acceptable (framed profile without a frame)\n" - }, + } }.each do |name, params| context name do - before(:each) do - get '/', {}, {"HTTP_ACCEPT" => params[:accept], "HTTP_LINK" => params[:link]} + before do + get '/', {}, { "HTTP_ACCEPT" => params[:accept], "HTTP_LINK" => params[:link] } end it "status" do - expect(last_response.status).to satisfy("be 200 or 406") {|x| [200, 406].include?(x)} + expect(last_response.status).to satisfy("be 200 or 406") { |x| [200, 406].include?(x) } end it "sets content type" do @@ -215,8 +218,8 @@ end describe Rack::LinkedData::ContentNegotiation do - include ::Rack::Test::Methods - let(:logger) {RDF::Spec.logger} + include Rack::Test::Methods + let(:logger) { RDF::Spec.logger } let(:app) do graph = RDF::NTriples::Reader.new(%( @@ -230,21 +233,21 @@ "The Introduction" 
. "An introductory chapter on The Republic." . )) - Rack::LinkedData::ContentNegotiation.new(double("Target Rack Application", :call => [200, {}, graph]), {}) + described_class.new(double("Target Rack Application", :call => [200, {}, graph]), {}) end describe "#call" do - let(:schema_context) { - JSON::LD::API::RemoteDocument.new(%q({ + let(:schema_context) do + JSON::LD::API::RemoteDocument.new('{ "@context": { "@vocab": "http://schema.org/", "id": "@id", "type": "@type" } - }), documentUrl: "http://schema.org") - } - let(:frame) { - JSON::LD::API::RemoteDocument.new(%q({ + }', documentUrl: "http://schema.org") + end + let(:frame) do + JSON::LD::API::RemoteDocument.new('{ "@context": { "dc": "http://purl.org/dc/elements/1.1/", "ex": "http://example.org/vocab#" @@ -256,26 +259,28 @@ "@type": "ex:Chapter" } } - }), documentUrl: "http://conneg.example.com/frame") - } - let(:context) { - JSON::LD::API::RemoteDocument.new(%q({ + }', documentUrl: "http://conneg.example.com/frame") + end + let(:context) do + JSON::LD::API::RemoteDocument.new('{ "@context": { "dc": "http://purl.org/dc/elements/1.1/", "ex": "http://example.org/vocab#" } - }), documentUrl: "http://conneg.example.com/context") - } + }', documentUrl: "http://conneg.example.com/context") + end - before(:each) do + before do allow(JSON::LD::API).to receive(:documentLoader).with("http://schema.org", any_args).and_yield(schema_context) - allow(JSON::LD::API).to receive(:documentLoader).with("http://conneg.example.com/context", any_args).and_yield(context) - allow(JSON::LD::API).to receive(:documentLoader).with("http://conneg.example.com/frame", any_args).and_yield(frame) + allow(JSON::LD::API).to receive(:documentLoader).with("http://conneg.example.com/context", + any_args).and_yield(context) + allow(JSON::LD::API).to receive(:documentLoader).with("http://conneg.example.com/frame", + any_args).and_yield(frame) end { - "application/json" => LIBRARY_FLATTENED_EXPANDED, - "application/ld+json" => 
LIBRARY_FLATTENED_EXPANDED, + "application/json" => LIBRARY_FLATTENED_EXPANDED, + "application/ld+json" => LIBRARY_FLATTENED_EXPANDED, %(application/ld+json;profile=http://www.w3.org/ns/json-ld#expanded) => LIBRARY_FLATTENED_EXPANDED, @@ -292,16 +297,16 @@ %(application/ld+json;profile="http://www.w3.org/ns/json-ld#flattened http://www.w3.org/ns/json-ld#compacted") => LIBRARY_FLATTENED_COMPACTED_DEFAULT, %(application/ld+json;profile="http://www.w3.org/ns/json-ld#compacted http://www.w3.org/ns/json-ld#flattened") => - LIBRARY_FLATTENED_COMPACTED_DEFAULT, + LIBRARY_FLATTENED_COMPACTED_DEFAULT }.each do |accepts, result| context accepts do - before(:each) do - get '/', {}, {"HTTP_ACCEPT" => accepts} + before do + get '/', {}, { "HTTP_ACCEPT" => accepts } end it "status" do - expect(last_response.status).to satisfy("200 or 406") {|x| [200, 406].include?(x)} + expect(last_response.status).to satisfy("200 or 406") { |x| [200, 406].include?(x) } end it "sets content type" do @@ -344,15 +349,15 @@ accept: %(application/ld+json;profile=http://www.w3.org/ns/json-ld#framed), link: %( rel="http://www.w3.org/ns/json-ld#frame"), result: LIBRARY_FRAMED - }, + } }.each do |name, params| context name do - before(:each) do - get '/', {}, {"HTTP_ACCEPT" => params[:accept], "HTTP_LINK" => params[:link]} + before do + get '/', {}, { "HTTP_ACCEPT" => params[:accept], "HTTP_LINK" => params[:link] } end it "status" do - expect(last_response.status).to satisfy("be 200 or 406") {|x| [200, 406].include?(x)} + expect(last_response.status).to satisfy("be 200 or 406") { |x| [200, 406].include?(x) } end it "sets content type" do diff --git a/spec/context_spec.rb b/spec/context_spec.rb index 01bade21..9d12519e 100644 --- a/spec/context_spec.rb +++ b/spec/context_spec.rb @@ -1,48 +1,52 @@ -# coding: utf-8 +# frozen_string_literal: true + require_relative 'spec_helper' require 'rdf/xsd' require 'rdf/spec/reader' # Add for testing -class JSON::LD::Context - # Retrieve type mappings - def coercions 
- term_definitions.inject({}) do |memo, (t,td)| - memo[t] = td.type_mapping - memo - end - end +module JSON + module LD + class Context + # Retrieve type mappings + def coercions + term_definitions.transform_values(&:type_mapping) + end - def containers - term_definitions.inject({}) do |memo, (t,td)| - memo[t] = td.container_mapping - memo + def containers + term_definitions.transform_values(&:container_mapping) + end end end end describe JSON::LD::Context do - let(:logger) {RDF::Spec.logger} - let(:context) {JSON::LD::Context.new(logger: logger, validate: true, processingMode: "json-ld-1.1", compactToRelative: true)} + subject { context } + + let(:logger) { RDF::Spec.logger } + let(:context) do + described_class.new(logger: logger, validate: true, processingMode: "json-ld-1.1", compactToRelative: true) + end let(:remote_doc) do - JSON::LD::API::RemoteDocument.new(%q({ + JSON::LD::API::RemoteDocument.new('{ "@context": { "xsd": "http://www.w3.org/2001/XMLSchema#", "name": "http://xmlns.com/foaf/0.1/name", "homepage": {"@id": "http://xmlns.com/foaf/0.1/homepage", "@type": "@id"}, "avatar": {"@id": "http://xmlns.com/foaf/0.1/avatar", "@type": "@id"} } - }), - documentUrl: "http://example.com/context", - contentType: "application/ld+json") + }', + documentUrl: "http://example.com/context", + contentType: "application/ld+json") end - subject {context} describe ".parse" do - let(:ctx) {[ - {"foo" => "http://example.com/foo"}, - {"bar" => "foo"} - ]} + let(:ctx) do + [ + { "foo" => "http://example.com/foo" }, + { "bar" => "foo" } + ] + end it "merges definitions from each context" do ec = described_class.parse(ctx) @@ -55,27 +59,30 @@ def containers describe "#parse" do context "remote" do - it "fails given a missing remote @context" do - JSON::LD::Context.instance_variable_set(:@cache, nil) - expect(JSON::LD::API).to receive(:documentLoader).with("http://example.com/context", anything).and_raise(IOError) - expect {subject.parse("http://example.com/context")}.to 
raise_error(JSON::LD::JsonLdError::LoadingRemoteContextFailed, %r{http://example.com/context}) + described_class.instance_variable_set(:@cache, nil) + expect(JSON::LD::API).to receive(:documentLoader).with("http://example.com/context", + anything).and_raise(IOError) + expect do + subject.parse("http://example.com/context") + end.to raise_error(JSON::LD::JsonLdError::LoadingRemoteContextFailed, %r{http://example.com/context}) end it "creates mappings" do - expect(JSON::LD::API).to receive(:documentLoader).with("http://example.com/context", anything).and_yield(remote_doc) + expect(JSON::LD::API).to receive(:documentLoader).with("http://example.com/context", + anything).and_yield(remote_doc) ec = subject.parse("http://example.com/context") expect(ec.send(:mappings)).to produce({ - "xsd" => "http://www.w3.org/2001/XMLSchema#", - "name" => "http://xmlns.com/foaf/0.1/name", + "xsd" => "http://www.w3.org/2001/XMLSchema#", + "name" => "http://xmlns.com/foaf/0.1/name", "homepage" => "http://xmlns.com/foaf/0.1/homepage", - "avatar" => "http://xmlns.com/foaf/0.1/avatar" + "avatar" => "http://xmlns.com/foaf/0.1/avatar" }, logger) end it "retrieves and parses a remote context document in HTML using the context profile" do remote_doc = - JSON::LD::API::RemoteDocument.new(%q( + JSON::LD::API::RemoteDocument.new(+' - ), + ', documentUrl: "http://example.com/context", contentType: "text/html") - JSON::LD::Context.instance_variable_set(:@cache, nil) - expect(JSON::LD::API).to receive(:documentLoader).with("http://example.com/context", anything).and_yield(remote_doc) + described_class.instance_variable_set(:@cache, nil) + expect(JSON::LD::API).to receive(:documentLoader).with("http://example.com/context", + anything).and_yield(remote_doc) ec = subject.parse("http://example.com/context") expect(ec.send(:mappings)).to produce({ - "xsd" => "http://www.w3.org/2001/XMLSchema#", - "name" => "http://xmlns.com/foaf/0.1/name", + "xsd" => "http://www.w3.org/2001/XMLSchema#", + "name" => 
"http://xmlns.com/foaf/0.1/name", "homepage" => "http://xmlns.com/foaf/0.1/homepage", - "avatar" => "http://xmlns.com/foaf/0.1/avatar" + "avatar" => "http://xmlns.com/foaf/0.1/avatar" }, logger) end it "retrieves and parses a remote context document in HTML" do remote_doc = - JSON::LD::API::RemoteDocument.new(%q( + JSON::LD::API::RemoteDocument.new(+' - ), + ', documentUrl: "http://example.com/context", contentType: "text/html") - JSON::LD::Context.instance_variable_set(:@cache, nil) - expect(JSON::LD::API).to receive(:documentLoader).with("http://example.com/context", anything).and_yield(remote_doc) + described_class.instance_variable_set(:@cache, nil) + expect(JSON::LD::API).to receive(:documentLoader).with("http://example.com/context", + anything).and_yield(remote_doc) ec = subject.parse("http://example.com/context") expect(ec.send(:mappings)).to produce({ - "xsd" => "http://www.w3.org/2001/XMLSchema#", - "name" => "http://xmlns.com/foaf/0.1/name", + "xsd" => "http://www.w3.org/2001/XMLSchema#", + "name" => "http://xmlns.com/foaf/0.1/name", "homepage" => "http://xmlns.com/foaf/0.1/homepage", - "avatar" => "http://xmlns.com/foaf/0.1/avatar" + "avatar" => "http://xmlns.com/foaf/0.1/avatar" }, logger) end it "notes non-existing @context" do - expect {subject.parse(StringIO.new("{}"))}.to raise_error(JSON::LD::JsonLdError::InvalidRemoteContext) + expect { subject.parse(StringIO.new("{}")) }.to raise_error(JSON::LD::JsonLdError::InvalidRemoteContext) end it "parses a referenced context at a relative URI" do - JSON::LD::Context.instance_variable_set(:@cache, nil) + described_class.instance_variable_set(:@cache, nil) rd1 = JSON::LD::API::RemoteDocument.new(%({"@context": "context"}), base_uri: "http://example.com/c1") expect(JSON::LD::API).to receive(:documentLoader).with("http://example.com/c1", anything).and_yield(rd1) - expect(JSON::LD::API).to receive(:documentLoader).with("http://example.com/context", anything).and_yield(remote_doc) + expect(JSON::LD::API).to 
receive(:documentLoader).with("http://example.com/context", + anything).and_yield(remote_doc) ec = subject.parse("http://example.com/c1") expect(ec.send(:mappings)).to produce({ - "xsd" => "http://www.w3.org/2001/XMLSchema#", - "name" => "http://xmlns.com/foaf/0.1/name", + "xsd" => "http://www.w3.org/2001/XMLSchema#", + "name" => "http://xmlns.com/foaf/0.1/name", "homepage" => "http://xmlns.com/foaf/0.1/homepage", - "avatar" => "http://xmlns.com/foaf/0.1/avatar" + "avatar" => "http://xmlns.com/foaf/0.1/avatar" }, logger) end context "remote with local mappings" do - let(:ctx) {["http://example.com/context", {"integer" => "xsd:integer"}]} - before {JSON::LD::Context.instance_variable_set(:@cache, nil)} + let(:ctx) { ["http://example.com/context", { "integer" => "xsd:integer" }] } + + before { described_class.instance_variable_set(:@cache, nil) } + it "retrieves and parses a remote context document" do - expect(JSON::LD::API).to receive(:documentLoader).with("http://example.com/context", anything).and_yield(remote_doc) + expect(JSON::LD::API).to receive(:documentLoader).with("http://example.com/context", + anything).and_yield(remote_doc) ec = subject.parse(ctx) expect(ec.send(:mappings)).to produce({ - "xsd" => "http://www.w3.org/2001/XMLSchema#", - "name" => "http://xmlns.com/foaf/0.1/name", + "xsd" => "http://www.w3.org/2001/XMLSchema#", + "name" => "http://xmlns.com/foaf/0.1/name", "homepage" => "http://xmlns.com/foaf/0.1/homepage", - "avatar" => "http://xmlns.com/foaf/0.1/avatar", - "integer" => "http://www.w3.org/2001/XMLSchema#integer" + "avatar" => "http://xmlns.com/foaf/0.1/avatar", + "integer" => "http://www.w3.org/2001/XMLSchema#integer" }, logger) end end context "pre-loaded remote" do - let(:ctx) {"http://example.com/preloaded"} - before(:all) { - JSON::LD::Context.add_preloaded("http://example.com/preloaded", - JSON::LD::Context.parse({'foo' => "http://example.com/"}) - ) - JSON::LD::Context.alias_preloaded("https://example.com/preloaded", 
"http://example.com/preloaded") - } - after(:all) {JSON::LD::Context.instance_variable_set(:@cache, nil)} + let(:ctx) { "http://example.com/preloaded" } + + before(:all) do + described_class.add_preloaded("http://example.com/preloaded", + described_class.parse({ 'foo' => "http://example.com/" })) + described_class.alias_preloaded("https://example.com/preloaded", "http://example.com/preloaded") + end + + after(:all) { described_class.instance_variable_set(:@cache, nil) } it "does not load referenced context" do expect(JSON::LD::API).not_to receive(:documentLoader).with(ctx, anything) @@ -215,24 +229,26 @@ def containers it "uses loaded context" do ec = subject.parse(ctx) expect(ec.send(:mappings)).to produce({ - "foo" => "http://example.com/" + "foo" => "http://example.com/" }, logger) end it "uses aliased context" do ec = subject.parse(ctx.sub('http', 'https')) expect(ec.send(:mappings)).to produce({ - "foo" => "http://example.com/" + "foo" => "http://example.com/" }, logger) end end end context "Array" do - let(:ctx) {[ - {"foo" => "http://example.com/foo"}, - {"bar" => "foo"} - ]} + let(:ctx) do + [ + { "foo" => "http://example.com/foo" }, + { "bar" => "foo" } + ] + end it "merges definitions from each context" do ec = subject.parse(ctx) @@ -243,21 +259,22 @@ def containers end it "merges definitions from remote contexts" do - JSON::LD::Context.instance_variable_set(:@cache, nil) - expect(JSON::LD::API).to receive(:documentLoader).with("http://example.com/context", anything).and_yield(remote_doc) - rd2 = JSON::LD::API::RemoteDocument.new(%q({ + described_class.instance_variable_set(:@cache, nil) + expect(JSON::LD::API).to receive(:documentLoader).with("http://example.com/context", + anything).and_yield(remote_doc) + rd2 = JSON::LD::API::RemoteDocument.new('{ "@context": { "title": {"@id": "http://purl.org/dc/terms/title"} } - }), base_uri: "http://example.com/c2") + }', base_uri: "http://example.com/c2") expect(JSON::LD::API).to 
receive(:documentLoader).with("http://example.com/c2", anything).and_yield(rd2) - ec = subject.parse(%w(http://example.com/context http://example.com/c2)) + ec = subject.parse(%w[http://example.com/context http://example.com/c2]) expect(ec.send(:mappings)).to produce({ - "xsd" => "http://www.w3.org/2001/XMLSchema#", - "name" => "http://xmlns.com/foaf/0.1/name", + "xsd" => "http://www.w3.org/2001/XMLSchema#", + "name" => "http://xmlns.com/foaf/0.1/name", "homepage" => "http://xmlns.com/foaf/0.1/homepage", - "avatar" => "http://xmlns.com/foaf/0.1/avatar", - "title" => "http://purl.org/dc/terms/title" + "avatar" => "http://xmlns.com/foaf/0.1/avatar", + "title" => "http://purl.org/dc/terms/title" }, logger) end end @@ -295,7 +312,7 @@ def containers it "maps term with @id" do expect(subject.parse({ - "foo" => {"@id" => "http://example.com/"} + "foo" => { "@id" => "http://example.com/" } }).send(:mappings)).to produce({ "foo" => "http://example.com/" }, logger) @@ -304,7 +321,7 @@ def containers it "maps term with blank node @id (with deprecation)" do expect do expect(subject.parse({ - "foo" => {"@id" => "_:bn"} + "foo" => { "@id" => "_:bn" } }).send(:mappings)).to produce({ "foo" => RDF::Node("bn") }, logger) @@ -314,7 +331,7 @@ def containers it "warns and ignores keyword-like term" do expect do expect(subject.parse({ - "@foo" => {"@id" => "http://example.org/foo"} + "@foo" => { "@id" => "http://example.org/foo" } }).send(:mappings)).to produce({}, logger) end.to write("Terms beginning with '@' are reserved").to(:error) end @@ -322,7 +339,7 @@ def containers it "maps '@' as a term" do expect do expect(subject.parse({ - "@" => {"@id" => "http://example.org/@"} + "@" => { "@id" => "http://example.org/@" } }).send(:mappings)).to produce({ "@" => "http://example.org/@" }, logger) @@ -332,7 +349,7 @@ def containers it "maps '@foo.bar' as a term" do expect do expect(subject.parse({ - "@foo.bar" => {"@id" => "http://example.org/foo.bar"} + "@foo.bar" => { "@id" => 
"http://example.org/foo.bar" } }).send(:mappings)).to produce({ "@foo.bar" => "http://example.org/foo.bar" }, logger) @@ -341,7 +358,7 @@ def containers it "associates @list container mapping with term" do expect(subject.parse({ - "foo" => {"@id" => "http://example.com/", "@container" => "@list"} + "foo" => { "@id" => "http://example.com/", "@container" => "@list" } }).containers).to produce({ "foo" => Set["@list"] }, logger) @@ -349,7 +366,7 @@ def containers it "associates @type container mapping with term" do expect(subject.parse({ - "foo" => {"@id" => "http://example.com/", "@container" => "@type"} + "foo" => { "@id" => "http://example.com/", "@container" => "@type" } }).containers).to produce({ "foo" => Set["@type"] }, logger) @@ -357,7 +374,7 @@ def containers it "associates @id container mapping with term" do expect(subject.parse({ - "foo" => {"@id" => "http://example.com/", "@container" => "@id"} + "foo" => { "@id" => "http://example.com/", "@container" => "@id" } }).containers).to produce({ "foo" => Set["@id"] }, logger) @@ -365,7 +382,7 @@ def containers it "associates @id type mapping with term" do expect(subject.parse({ - "foo" => {"@id" => "http://example.com/", "@type" => "@id"} + "foo" => { "@id" => "http://example.com/", "@type" => "@id" } }).coercions).to produce({ "foo" => "@id" }, logger) @@ -373,7 +390,7 @@ def containers it "associates @json type mapping with term" do expect(subject.parse({ - "foo" => {"@id" => "http://example.com/", "@type" => "@json"} + "foo" => { "@id" => "http://example.com/", "@type" => "@json" } }).coercions).to produce({ "foo" => "@json" }, logger) @@ -381,7 +398,7 @@ def containers it "associates type mapping with term" do expect(subject.parse({ - "foo" => {"@id" => "http://example.com/", "@type" => RDF::XSD.string.to_s} + "foo" => { "@id" => "http://example.com/", "@type" => RDF::XSD.string.to_s } }).coercions).to produce({ "foo" => RDF::XSD.string }, logger) @@ -389,7 +406,7 @@ def containers it "associates language 
mapping with term" do expect(subject.parse({ - "foo" => {"@id" => "http://example.com/", "@language" => "en"} + "foo" => { "@id" => "http://example.com/", "@language" => "en" } }).send(:languages)).to produce({ "foo" => "en" }, logger) @@ -419,37 +436,37 @@ def containers context "with null" do it "removes @language if set to null" do expect(subject.parse([ - { - "@language" => "en" - }, - { - "@language" => nil - } - ]).default_language).to produce(nil, logger) + { + "@language" => "en" + }, + { + "@language" => nil + } + ]).default_language).to produce(nil, logger) end it "removes @vocab if set to null" do expect(subject.parse([ - { - "@vocab" => "http://schema.org/" - }, - { - "@vocab" => nil - } - ]).vocab).to produce(nil, logger) + { + "@vocab" => "http://schema.org/" + }, + { + "@vocab" => nil + } + ]).vocab).to produce(nil, logger) end it "removes term if set to null with @vocab" do expect(subject.parse([ - { - "@vocab" => "http://schema.org/", - "term" => nil - } - ]).send(:mappings)).to produce({"term" => nil}, logger) + { + "@vocab" => "http://schema.org/", + "term" => nil + } + ]).send(:mappings)).to produce({ "term" => nil }, logger) end it "loads initial context" do - init_ec = JSON::LD::Context.new + init_ec = described_class.new nil_ec = subject.parse(nil) expect(nil_ec.default_language).to eq init_ec.default_language expect(nil_ec.send(:languages)).to eq init_ec.send(:languages) @@ -459,29 +476,35 @@ def containers end it "removes a term definition" do - expect(subject.parse({"name" => nil}).send(:mapping, "name")).to be_nil + expect(subject.parse({ "name" => nil }).send(:mapping, "name")).to be_nil end end - context "@propagate" do it "generates an InvalidPropagateValue error if not a boolean" do - expect {subject.parse({'@version' => 1.1, '@propagate' => "String"})}.to raise_error(JSON::LD::JsonLdError::InvalidPropagateValue) + expect do + subject.parse({ '@version' => 1.1, + '@propagate' => "String" }) + end.to 
raise_error(JSON::LD::JsonLdError::InvalidPropagateValue) end end context "@import" do - before(:each) {JSON::LD::Context.instance_variable_set(:@cache, nil)} + before { described_class.instance_variable_set(:@cache, nil) } + it "generates an InvalidImportValue error if not a string" do - expect {subject.parse({'@version' => 1.1, '@import' => true})}.to raise_error(JSON::LD::JsonLdError::InvalidImportValue) + expect do + subject.parse({ '@version' => 1.1, '@import' => true }) + end.to raise_error(JSON::LD::JsonLdError::InvalidImportValue) end it "retrieves remote context" do - expect(JSON::LD::API).to receive(:documentLoader).with("http://example.com/context", anything).and_yield(remote_doc) - ec = subject.parse(JSON.parse %({ + expect(JSON::LD::API).to receive(:documentLoader).with("http://example.com/context", + anything).and_yield(remote_doc) + ec = subject.parse(JSON.parse(%({ "@version": 1.1, "@import": "http://example.com/context" - })) + }))) expect(ec.term_definitions).to include("avatar") end end @@ -489,109 +512,116 @@ def containers describe "Syntax Errors" do { - "malformed JSON" => StringIO.new(%q({"@context": {"foo" "http://malformed/"})), - "no @id, @type, or @container" => {"foo" => {}}, - "value as array" => {"foo" => []}, - "@id as object" => {"foo" => {"@id" => {}}}, - "@id as array of object" => {"foo" => {"@id" => [{}]}}, - "@id as array of null" => {"foo" => {"@id" => [nil]}}, - "@type as object" => {"foo" => {"@type" => {}}}, - "@type as array" => {"foo" => {"@type" => []}}, - "@type as @list" => {"foo" => {"@type" => "@list"}}, - "@type as @none" => {"@version" => 1.1, "foo" => {"@type" => "@none"}}, - "@type as @set" => {"foo" => {"@type" => "@set"}}, - "@container as object" => {"foo" => {"@container" => {}}}, - "@container as empty array" => {"foo" => {"@container" => []}}, - "@container as string" => {"foo" => {"@container" => "true"}}, - "@context which is invalid" => {"foo" => {"@context" => {"bar" => []}}}, - "@language as @id" => 
{"@language" => {"@id" => "http://example.com/"}}, - "@direction as foo" => {"@direction" => "foo"}, - "@vocab as @id" => {"@vocab" => {"@id" => "http://example.com/"}}, - "@prefix string" => {"foo" => {"@id" => 'http://example.org/', "@prefix" => "str"}}, - "@prefix array" => {"foo" => {"@id" => 'http://example.org/', "@prefix" => []}}, - "@prefix object" => {"foo" => {"@id" => 'http://example.org/', "@prefix" => {}}}, + "malformed JSON" => StringIO.new('{"@context": {"foo" "http://malformed/"}'), + "no @id, @type, or @container" => { "foo" => {} }, + "value as array" => { "foo" => [] }, + "@id as object" => { "foo" => { "@id" => {} } }, + "@id as array of object" => { "foo" => { "@id" => [{}] } }, + "@id as array of null" => { "foo" => { "@id" => [nil] } }, + "@type as object" => { "foo" => { "@type" => {} } }, + "@type as array" => { "foo" => { "@type" => [] } }, + "@type as @list" => { "foo" => { "@type" => "@list" } }, + "@type as @none" => { "@version" => 1.1, "foo" => { "@type" => "@none" } }, + "@type as @set" => { "foo" => { "@type" => "@set" } }, + "@container as object" => { "foo" => { "@container" => {} } }, + "@container as empty array" => { "foo" => { "@container" => [] } }, + "@container as string" => { "foo" => { "@container" => "true" } }, + "@context which is invalid" => { "foo" => { "@context" => { "bar" => [] } } }, + "@language as @id" => { "@language" => { "@id" => "http://example.com/" } }, + "@direction as foo" => { "@direction" => "foo" }, + "@vocab as @id" => { "@vocab" => { "@id" => "http://example.com/" } }, + "@prefix string" => { "foo" => { "@id" => 'http://example.org/', "@prefix" => "str" } }, + "@prefix array" => { "foo" => { "@id" => 'http://example.org/', "@prefix" => [] } }, + "@prefix object" => { "foo" => { "@id" => 'http://example.org/', "@prefix" => {} } }, "IRI term expands to different IRI" => { "ex" => "http://example.com/", "ex2" => "http://example.com/2/", "ex:foo" => "ex2:foo" - }, + }, "IRI term expands to different 
IRI (reverse)" => { "ex" => "http://example.com/", "ex2" => "http://example.com/2/", - "ex:foo" => {"@reverse" => "ex2:foo"} + "ex:foo" => { "@reverse" => "ex2:foo" } } }.each do |title, context| it title do - expect { + expect do ec = subject.parse(context) expect(ec.serialize).to produce({}, logger) - }.to raise_error(JSON::LD::JsonLdError) + end.to raise_error(JSON::LD::JsonLdError) end end context "1.0" do - let(:context) {JSON::LD::Context.new(logger: logger, validate: true, processingMode: 'json-ld-1.0')} + let(:context) { described_class.new(logger: logger, validate: true, processingMode: 'json-ld-1.0') } + { - "@context" => {"foo" => {"@id" => 'http://example.org/', "@context" => {}}}, - "@container @id" => {"foo" => {"@container" => "@id"}}, - "@container @type" => {"foo" => {"@container" => "@type"}}, - "@nest" => {"foo" => {"@id" => 'http://example.org/', "@nest" => "@nest"}}, - "@type as @none" => {"foo" => {"@type" => "@none"}}, - "@type as @json" => {"foo" => {"@type" => "@json"}}, - "@prefix" => {"foo" => {"@id" => 'http://example.org/', "@prefix" => true}}, + "@context" => { "foo" => { "@id" => 'http://example.org/', "@context" => {} } }, + "@container @id" => { "foo" => { "@container" => "@id" } }, + "@container @type" => { "foo" => { "@container" => "@type" } }, + "@nest" => { "foo" => { "@id" => 'http://example.org/', "@nest" => "@nest" } }, + "@type as @none" => { "foo" => { "@type" => "@none" } }, + "@type as @json" => { "foo" => { "@type" => "@json" } }, + "@prefix" => { "foo" => { "@id" => 'http://example.org/', "@prefix" => true } } }.each do |title, context| it title do - expect { + expect do ec = subject.parse(context) expect(ec.serialize).to produce({}, logger) - }.to raise_error(JSON::LD::JsonLdError) + end.to raise_error(JSON::LD::JsonLdError) end end it "generates InvalidContextEntry if using @propagate" do - expect {context.parse({'@propagate' => true})}.to raise_error(JSON::LD::JsonLdError::InvalidContextEntry) + expect { 
context.parse({ '@propagate' => true }) }.to raise_error(JSON::LD::JsonLdError::InvalidContextEntry) end it "generates InvalidContextEntry if using @import" do - expect {context.parse({'@import' => "location"})}.to raise_error(JSON::LD::JsonLdError::InvalidContextEntry) + expect do + context.parse({ '@import' => "location" }) + end.to raise_error(JSON::LD::JsonLdError::InvalidContextEntry) end - (JSON::LD::KEYWORDS - %w(@base @language @version @protected @propagate @vocab)).each do |kw| + (JSON::LD::KEYWORDS - %w[@base @language @version @protected @propagate @vocab]).each do |kw| it "does not redefine #{kw} with an @container" do - expect { - ec = subject.parse({kw => {"@container" => "@set"}}) + expect do + ec = subject.parse({ kw => { "@container" => "@set" } }) expect(ec.serialize).to produce({}, logger) - }.to raise_error(JSON::LD::JsonLdError) + end.to raise_error(JSON::LD::JsonLdError) end end end - (JSON::LD::KEYWORDS - %w(@base @direction @language @protected @propagate @import @version @vocab)).each do |kw| + (JSON::LD::KEYWORDS - %w[@base @direction @language @protected @propagate @import @version @vocab]).each do |kw| it "does not redefine #{kw} as a string" do - expect { - ec = subject.parse({kw => "http://example.com/"}) + expect do + ec = subject.parse({ kw => "http://example.com/" }) expect(ec.serialize).to produce({}, logger) - }.to raise_error(JSON::LD::JsonLdError) + end.to raise_error(JSON::LD::JsonLdError) end it "does not redefine #{kw} with an @id" do - expect { - ec = subject.parse({kw => {"@id" => "http://example.com/"}}) + expect do + ec = subject.parse({ kw => { "@id" => "http://example.com/" } }) expect(ec.serialize).to produce({}, logger) - }.to raise_error(JSON::LD::JsonLdError) + end.to raise_error(JSON::LD::JsonLdError) end - it "does not redefine #{kw} with an @container" do - expect { - ec = subject.parse({"@version" => 1.1, kw => {"@container" => "@set"}}) - expect(ec.serialize).to produce({}, logger) - }.to 
raise_error(JSON::LD::JsonLdError) - end unless kw == '@type' + unless kw == '@type' + it "does not redefine #{kw} with an @container" do + expect do + ec = subject.parse({ "@version" => 1.1, kw => { "@container" => "@set" } }) + expect(ec.serialize).to produce({}, logger) + end.to raise_error(JSON::LD::JsonLdError) + end + end + + next unless kw == '@type' it "redefines #{kw} with an @container" do - ec = subject.parse({kw => {"@container" => "@set"}}) + ec = subject.parse({ kw => { "@container" => "@set" } }) expect(ec.as_array('@type')).to be_truthy - end if kw == '@type' + end end end end @@ -600,9 +630,9 @@ def containers it "sets to json-ld-1.1 if @version: 1.1" do [ %({"@version": 1.1}), - %([{"@version": 1.1}]), + %([{"@version": 1.1}]) ].each do |str| - ctx = JSON::LD::Context.parse(::JSON.parse(str)) + ctx = described_class.parse(JSON.parse(str)) expect(ctx.processingMode).to eql "json-ld-1.1" end end @@ -614,22 +644,27 @@ def containers 1.0, "foo" ].each do |vers| - expect {JSON::LD::Context.parse({"@version" => vers})}.to raise_error(JSON::LD::JsonLdError::InvalidVersionValue) + expect do + described_class.parse({ "@version" => vers }) + end.to raise_error(JSON::LD::JsonLdError::InvalidVersionValue) end end it "raises ProcessingModeConflict if provided processing mode conflicts with context" do - expect {JSON::LD::Context.parse({"@version" => 1.1}, processingMode: "json-ld-1.0")}.to raise_error(JSON::LD::JsonLdError::ProcessingModeConflict) + expect do + described_class.parse({ "@version" => 1.1 }, + processingMode: "json-ld-1.0") + end.to raise_error(JSON::LD::JsonLdError::ProcessingModeConflict) end it "does not raise ProcessingModeConflict nested context is different from starting context" do - expect {JSON::LD::Context.parse([{}, {"@version" => 1.1}])}.not_to raise_error + expect { described_class.parse([{}, { "@version" => 1.1 }]) }.not_to raise_error end end describe "#merge" do it "creates a new context with components of each" do - c2 = 
JSON::LD::Context.parse({'foo' => "http://example.com/"}) + c2 = described_class.parse({ 'foo' => "http://example.com/" }) cm = context.merge(c2) expect(cm).not_to equal context expect(cm).not_to equal c2 @@ -638,9 +673,10 @@ def containers end describe "#serialize" do - before {JSON::LD::Context.instance_variable_set(:@cache, nil)} + before { described_class.instance_variable_set(:@cache, nil) } + it "context hash" do - ctx = {"foo" => "http://example.com/"} + ctx = { "foo" => "http://example.com/" } ec = subject.parse(ctx) expect(ec.serialize).to produce({ @@ -667,8 +703,8 @@ def containers end it "term mappings" do - c = subject. - parse({'foo' => "http://example.com/"}) + c = subject + .parse({ 'foo' => "http://example.com/" }) expect(c.serialize).to produce({ "@context" => { "foo" => "http://example.com/" @@ -678,13 +714,13 @@ def containers it "@context" do expect(subject.parse({ - "foo" => {"@id" => "http://example.com/", "@context" => {"bar" => "http://example.com/baz"}} - }). - serialize).to produce({ + "foo" => { "@id" => "http://example.com/", "@context" => { "bar" => "http://example.com/baz" } } + }) + .serialize).to produce({ "@context" => { "foo" => { "@id" => "http://example.com/", - "@context" => {"bar" => "http://example.com/baz"} + "@context" => { "bar" => "http://example.com/baz" } } } }, logger) @@ -693,45 +729,45 @@ def containers it "@type with dependent prefixes in a single context" do expect(subject.parse({ 'xsd' => "http://www.w3.org/2001/XMLSchema#", - 'homepage' => {'@id' => RDF::Vocab::FOAF.homepage.to_s, '@type' => '@id'} - }). 
- serialize).to produce({ + 'homepage' => { '@id' => RDF::Vocab::FOAF.homepage.to_s, '@type' => '@id' } + }) + .serialize).to produce({ "@context" => { "xsd" => RDF::XSD.to_uri.to_s, - "homepage" => {"@id" => RDF::Vocab::FOAF.homepage.to_s, "@type" => "@id"} + "homepage" => { "@id" => RDF::Vocab::FOAF.homepage.to_s, "@type" => "@id" } } }, logger) end it "@list with @id definition in a single context" do expect(subject.parse({ - 'knows' => {'@id' => RDF::Vocab::FOAF.knows.to_s, '@container' => '@list'} - }). - serialize).to produce({ + 'knows' => { '@id' => RDF::Vocab::FOAF.knows.to_s, '@container' => '@list' } + }) + .serialize).to produce({ "@context" => { - "knows" => {"@id" => RDF::Vocab::FOAF.knows.to_s, "@container" => "@list"} + "knows" => { "@id" => RDF::Vocab::FOAF.knows.to_s, "@container" => "@list" } } }, logger) end it "@set with @id definition in a single context" do expect(subject.parse({ - "knows" => {"@id" => RDF::Vocab::FOAF.knows.to_s, "@container" => "@set"} - }). - serialize).to produce({ + "knows" => { "@id" => RDF::Vocab::FOAF.knows.to_s, "@container" => "@set" } + }) + .serialize).to produce({ "@context" => { - "knows" => {"@id" => RDF::Vocab::FOAF.knows.to_s, "@container" => "@set"} + "knows" => { "@id" => RDF::Vocab::FOAF.knows.to_s, "@container" => "@set" } } }, logger) end it "@language with @id definition in a single context" do expect(subject.parse({ - "name" => {"@id" => RDF::Vocab::FOAF.name.to_s, "@language" => "en"} - }). 
- serialize).to produce({ + "name" => { "@id" => RDF::Vocab::FOAF.name.to_s, "@language" => "en" } + }) + .serialize).to produce({ "@context" => { - "name" => {"@id" => RDF::Vocab::FOAF.name.to_s, "@language" => "en"} + "name" => { "@id" => RDF::Vocab::FOAF.name.to_s, "@language" => "en" } } }, logger) end @@ -739,12 +775,12 @@ def containers it "@language with @id definition in a single context and equivalent default" do expect(subject.parse({ "@language" => 'en', - "name" => {"@id" => RDF::Vocab::FOAF.name.to_s, "@language" => 'en'} - }). - serialize).to produce({ + "name" => { "@id" => RDF::Vocab::FOAF.name.to_s, "@language" => 'en' } + }) + .serialize).to produce({ "@context" => { "@language" => 'en', - "name" => {"@id" => RDF::Vocab::FOAF.name.to_s, "@language" => 'en'} + "name" => { "@id" => RDF::Vocab::FOAF.name.to_s, "@language" => 'en' } } }, logger) end @@ -752,12 +788,12 @@ def containers it "@language with @id definition in a single context and different default" do expect(subject.parse({ "@language" => 'en', - "name" => {"@id" => RDF::Vocab::FOAF.name.to_s, "@language" => "de"} - }). - serialize).to produce({ + "name" => { "@id" => RDF::Vocab::FOAF.name.to_s, "@language" => "de" } + }) + .serialize).to produce({ "@context" => { "@language" => 'en', - "name" => {"@id" => RDF::Vocab::FOAF.name.to_s, "@language" => "de"} + "name" => { "@id" => RDF::Vocab::FOAF.name.to_s, "@language" => "de" } } }, logger) end @@ -765,45 +801,45 @@ def containers it "null @language with @id definition in a single context and default" do expect(subject.parse({ "@language" => 'en', - "name" => {"@id" => RDF::Vocab::FOAF.name.to_s, "@language" => nil} - }). 
- serialize).to produce({ + "name" => { "@id" => RDF::Vocab::FOAF.name.to_s, "@language" => nil } + }) + .serialize).to produce({ "@context" => { "@language" => 'en', - "name" => {"@id" => RDF::Vocab::FOAF.name.to_s, "@language" => nil} + "name" => { "@id" => RDF::Vocab::FOAF.name.to_s, "@language" => nil } } }, logger) end it "prefix with @type and @list" do expect(subject.parse({ - "knows" => {"@id" => RDF::Vocab::FOAF.knows.to_s, "@type" => "@id", "@container" => "@list"} - }). - serialize).to produce({ + "knows" => { "@id" => RDF::Vocab::FOAF.knows.to_s, "@type" => "@id", "@container" => "@list" } + }) + .serialize).to produce({ "@context" => { - "knows" => {"@id" => RDF::Vocab::FOAF.knows.to_s, "@type" => "@id", "@container" => "@list"} + "knows" => { "@id" => RDF::Vocab::FOAF.knows.to_s, "@type" => "@id", "@container" => "@list" } } }, logger) end it "prefix with @type and @set" do expect(subject.parse({ - "knows" => {"@id" => RDF::Vocab::FOAF.knows.to_s, "@type" => "@id", "@container" => "@set"} - }). - serialize).to produce({ + "knows" => { "@id" => RDF::Vocab::FOAF.knows.to_s, "@type" => "@id", "@container" => "@set" } + }) + .serialize).to produce({ "@context" => { - "knows" => {"@id" => RDF::Vocab::FOAF.knows.to_s, "@type" => "@id", "@container" => "@set"} + "knows" => { "@id" => RDF::Vocab::FOAF.knows.to_s, "@type" => "@id", "@container" => "@set" } } }, logger) end it "prefix with @type @json" do expect(subject.parse({ - "knows" => {"@id" => RDF::Vocab::FOAF.knows.to_s, "@type" => "@json"} - }). - serialize).to produce({ + "knows" => { "@id" => RDF::Vocab::FOAF.knows.to_s, "@type" => "@json" } + }) + .serialize).to produce({ "@context" => { - "knows" => {"@id" => RDF::Vocab::FOAF.knows.to_s, "@type" => "@json"} + "knows" => { "@id" => RDF::Vocab::FOAF.knows.to_s, "@type" => "@json" } } }, logger) end @@ -814,8 +850,8 @@ def containers "foaf:knows" => { "@container" => "@list" } - }). 
- serialize).to produce({ + }) + .serialize).to produce({ "@context" => { "foaf" => RDF::Vocab::FOAF.to_uri.to_s, "foaf:knows" => { @@ -828,12 +864,12 @@ def containers it "does not use aliased @id in key position" do expect(subject.parse({ "id" => "@id", - "knows" => {"@id" => RDF::Vocab::FOAF.knows.to_s, "@container" => "@list"} - }). - serialize).to produce({ + "knows" => { "@id" => RDF::Vocab::FOAF.knows.to_s, "@container" => "@list" } + }) + .serialize).to produce({ "@context" => { "id" => "@id", - "knows" => {"@id" => RDF::Vocab::FOAF.knows.to_s, "@container" => "@list"} + "knows" => { "@id" => RDF::Vocab::FOAF.knows.to_s, "@container" => "@list" } } }, logger) end @@ -845,8 +881,8 @@ def containers "foaf:homepage" => { "@type" => "@id" } - }). - serialize).to produce({ + }) + .serialize).to produce({ "@context" => { "foaf" => RDF::Vocab::FOAF.to_uri.to_s, "id" => "@id", @@ -861,13 +897,13 @@ def containers expect(subject.parse({ "foaf" => RDF::Vocab::FOAF.to_uri.to_s, "type" => "@type", - "foaf:homepage" => {"@type" => "@id"} - }). - serialize).to produce({ + "foaf:homepage" => { "@type" => "@id" } + }) + .serialize).to produce({ "@context" => { "foaf" => RDF::Vocab::FOAF.to_uri.to_s, "type" => "@type", - "foaf:homepage" => {"@type" => "@id"} + "foaf:homepage" => { "@type" => "@id" } } }, logger) end @@ -875,12 +911,12 @@ def containers it "does not use aliased @container" do expect(subject.parse({ "container" => "@container", - "knows" => {"@id" => RDF::Vocab::FOAF.knows.to_s, "@container" => "@list"} - }). 
- serialize).to produce({ + "knows" => { "@id" => RDF::Vocab::FOAF.knows.to_s, "@container" => "@list" } + }) + .serialize).to produce({ "@context" => { "container" => "@container", - "knows" => {"@id" => RDF::Vocab::FOAF.knows.to_s, "@container" => "@list"} + "knows" => { "@id" => RDF::Vocab::FOAF.knows.to_s, "@container" => "@list" } } }, logger) end @@ -888,12 +924,12 @@ def containers it "compacts IRIs to Compact IRIs" do expect(subject.parse({ "ex" => 'http://example.org/', - "term" => {"@id" => "ex:term", "@type" => "ex:datatype"} - }). - serialize).to produce({ + "term" => { "@id" => "ex:term", "@type" => "ex:datatype" } + }) + .serialize).to produce({ "@context" => { "ex" => 'http://example.org/', - "term" => {"@id" => "ex:term", "@type" => "ex:datatype"} + "term" => { "@id" => "ex:term", "@type" => "ex:datatype" } } }, logger) end @@ -901,39 +937,40 @@ def containers it "compacts IRIs using @vocab" do expect(subject.parse({ "@vocab" => 'http://example.org/', - "term" => {"@id" => "http://example.org/term", "@type" => "datatype"} - }). 
- serialize).to produce({ + "term" => { "@id" => "http://example.org/term", "@type" => "datatype" } + }) + .serialize).to produce({ "@context" => { "@vocab" => 'http://example.org/', - "term" => {"@type" => "datatype"} + "term" => { "@type" => "datatype" } } }, logger) end context "invalid term definitions" do { - "empty term": { - input: {"" => "http://blank-term/"} + 'empty term': { + input: { "" => "http://blank-term/" } }, - "extra key": { - input: {"foo" => {"@id" => "http://example.com/foo", "@baz" => "foobar"}} + 'extra key': { + input: { "foo" => { "@id" => "http://example.com/foo", "@baz" => "foobar" } } } }.each do |title, params| it title do - expect {subject.parse(params[:input])}.to raise_error(JSON::LD::JsonLdError::InvalidTermDefinition) + expect { subject.parse(params[:input]) }.to raise_error(JSON::LD::JsonLdError::InvalidTermDefinition) end end end - end describe "#to_rb" do - before(:all) {JSON::LD::Context.instance_variable_set(:@cache, nil)} - subject { - allow(JSON::LD::API).to receive(:documentLoader).with("http://example.com/context", anything).and_yield(remote_doc) + subject do + allow(JSON::LD::API).to receive(:documentLoader).with("http://example.com/context", + anything).and_yield(remote_doc) context.parse("http://example.com/context") - } + end + + before(:all) { described_class.instance_variable_set(:@cache, nil) } it "encodes as utf-8" do expect(subject.to_rb).to match(/encoding: utf-8/) @@ -960,14 +997,14 @@ def containers end describe "#base=" do - subject { + subject do context.parse({ '@base' => 'http://base/', '@vocab' => 'http://vocab/', 'ex' => 'http://example.org/', '_' => 'http://underscore/' }) - } + end it "sets new base uri given an absolute uri" do subject.base = "http://example.org/" @@ -981,11 +1018,11 @@ def containers end describe "#vocab=" do - subject { + subject do context.parse({ - '@base' => 'http://base/resource', + '@base' => 'http://base/resource' }) - } + end it "sets vocab from absolute iri" do subject.vocab 
= "http://example.org/" @@ -1023,21 +1060,21 @@ def containers end describe "#expand_iri" do - subject { + subject do context.parse({ '@base' => 'http://base/base', '@vocab' => 'http://vocab/', 'ex' => 'http://example.org/', '_' => 'http://underscore/' }) - } + end it "bnode" do expect(subject.expand_iri("_:a")).to be_a(RDF::Node) end context "keywords" do - %w(id type).each do |kw| + %w[id type].each do |kw| it "expands #{kw} to @#{kw}" do subject.set_mapping(kw, "@#{kw}") expect(subject.expand_iri(kw, vocab: true)).to produce("@#{kw}", logger) @@ -1048,22 +1085,22 @@ def containers context "relative IRI" do context "with no options" do { - "absolute IRI" => ["http://example.org/", RDF::URI("http://example.org/")], - "term" => ["ex", RDF::URI("ex")], - "prefix:suffix" => ["ex:suffix", RDF::URI("http://example.org/suffix")], - "#frag" => ["#frag", RDF::URI("#frag")], - "#frag:2" => ["#frag:2", RDF::URI("#frag:2")], - "keyword" => ["@type", "@type"], - "unmapped" => ["foo", RDF::URI("foo")], - "relative" => ["foo/bar", RDF::URI("foo/bar")], - "dotseg" => ["../foo/bar", RDF::URI("../foo/bar")], - "empty term" => ["", RDF::URI("")], - "another abs IRI"=>["ex://foo", RDF::URI("ex://foo")], + "absolute IRI" => ["http://example.org/", RDF::URI("http://example.org/")], + "term" => ["ex", RDF::URI("ex")], + "prefix:suffix" => ["ex:suffix", RDF::URI("http://example.org/suffix")], + "#frag" => ["#frag", RDF::URI("#frag")], + "#frag:2" => ["#frag:2", RDF::URI("#frag:2")], + "keyword" => ["@type", "@type"], + "unmapped" => ["foo", RDF::URI("foo")], + "relative" => ["foo/bar", RDF::URI("foo/bar")], + "dotseg" => ["../foo/bar", RDF::URI("../foo/bar")], + "empty term" => ["", RDF::URI("")], + "another abs IRI" => ["ex://foo", RDF::URI("ex://foo")], "absolute IRI looking like a Compact IRI" => ["foo:bar", RDF::URI("foo:bar")], - "bnode" => ["_:t0", RDF::Node("t0")], - "_" => ["_", RDF::URI("_")], - "@" => ["@", RDF::URI("@")], + "bnode" => ["_:t0", RDF::Node("t0")], + "_" => ["_", 
RDF::URI("_")], + "@" => ["@", RDF::URI("@")] }.each do |title, (input, result)| it title do expect(subject.expand_iri(input)).to produce(result, logger) @@ -1073,22 +1110,22 @@ def containers context "with base IRI" do { - "absolute IRI" => ["http://example.org/", RDF::URI("http://example.org/")], - "term" => ["ex", RDF::URI("http://base/ex")], - "prefix:suffix" => ["ex:suffix", RDF::URI("http://example.org/suffix")], - "#frag" => ["#frag", RDF::URI("http://base/base#frag")], - "#frag:2" => ["#frag:2", RDF::URI("http://base/base#frag:2")], - "keyword" => ["@type", "@type"], - "unmapped" => ["foo", RDF::URI("http://base/foo")], - "relative" => ["foo/bar", RDF::URI("http://base/foo/bar")], - "dotseg" => ["../foo/bar", RDF::URI("http://base/foo/bar")], - "empty term" => ["", RDF::URI("http://base/base")], - "another abs IRI"=>["ex://foo", RDF::URI("ex://foo")], + "absolute IRI" => ["http://example.org/", RDF::URI("http://example.org/")], + "term" => ["ex", RDF::URI("http://base/ex")], + "prefix:suffix" => ["ex:suffix", RDF::URI("http://example.org/suffix")], + "#frag" => ["#frag", RDF::URI("http://base/base#frag")], + "#frag:2" => ["#frag:2", RDF::URI("http://base/base#frag:2")], + "keyword" => ["@type", "@type"], + "unmapped" => ["foo", RDF::URI("http://base/foo")], + "relative" => ["foo/bar", RDF::URI("http://base/foo/bar")], + "dotseg" => ["../foo/bar", RDF::URI("http://base/foo/bar")], + "empty term" => ["", RDF::URI("http://base/base")], + "another abs IRI" => ["ex://foo", RDF::URI("ex://foo")], "absolute IRI looking like a compact IRI" => ["foo:bar", RDF::URI("foo:bar")], - "bnode" => ["_:t0", RDF::Node("t0")], - "_" => ["_", RDF::URI("http://base/_")], - "@" => ["@", RDF::URI("http://base/@")], + "bnode" => ["_:t0", RDF::Node("t0")], + "_" => ["_", RDF::URI("http://base/_")], + "@" => ["@", RDF::URI("http://base/@")] }.each do |title, (input, result)| it title do expect(subject.expand_iri(input, documentRelative: true)).to produce(result, logger) @@ -1098,21 
+1135,21 @@ def containers context "@vocab" do { - "absolute IRI" => ["http://example.org/", RDF::URI("http://example.org/")], - "term" => ["ex", RDF::URI("http://example.org/")], - "prefix:suffix" => ["ex:suffix", RDF::URI("http://example.org/suffix")], - "#frag" => ["#frag", RDF::URI("http://vocab/#frag")], - "#frag:2" => ["#frag:2", RDF::URI("http://vocab/#frag:2")], - "keyword" => ["@type", "@type"], - "unmapped" => ["foo", RDF::URI("http://vocab/foo")], - "relative" => ["foo/bar", RDF::URI("http://vocab/foo/bar")], - "dotseg" => ["../foo/bar", RDF::URI("http://vocab/../foo/bar")], - "another abs IRI"=>["ex://foo", RDF::URI("ex://foo")], + "absolute IRI" => ["http://example.org/", RDF::URI("http://example.org/")], + "term" => ["ex", RDF::URI("http://example.org/")], + "prefix:suffix" => ["ex:suffix", RDF::URI("http://example.org/suffix")], + "#frag" => ["#frag", RDF::URI("http://vocab/#frag")], + "#frag:2" => ["#frag:2", RDF::URI("http://vocab/#frag:2")], + "keyword" => ["@type", "@type"], + "unmapped" => ["foo", RDF::URI("http://vocab/foo")], + "relative" => ["foo/bar", RDF::URI("http://vocab/foo/bar")], + "dotseg" => ["../foo/bar", RDF::URI("http://vocab/../foo/bar")], + "another abs IRI" => ["ex://foo", RDF::URI("ex://foo")], "absolute IRI looking like a compact IRI" => ["foo:bar", RDF::URI("foo:bar")], - "bnode" => ["_:t0", RDF::Node("t0")], - "_" => ["_", RDF::URI("http://underscore/")], - "@" => ["@", RDF::URI("http://vocab/@")], + "bnode" => ["_:t0", RDF::Node("t0")], + "_" => ["_", RDF::URI("http://underscore/")], + "@" => ["@", RDF::URI("http://vocab/@")] }.each do |title, (input, result)| it title do expect(subject.expand_iri(input, vocab: true)).to produce(result, logger) @@ -1120,30 +1157,30 @@ def containers end context "set to ''" do - subject { + subject do context.parse({ '@base' => 'http://base/base', '@vocab' => '', 'ex' => 'http://example.org/', '_' => 'http://underscore/' }) - } + end { - "absolute IRI" => ["http://example.org/", 
RDF::URI("http://example.org/")], - "term" => ["ex", RDF::URI("http://example.org/")], - "prefix:suffix" => ["ex:suffix", RDF::URI("http://example.org/suffix")], - "#frag" => ["#frag", RDF::URI("http://base/base#frag")], - "#frag:2" => ["#frag:2", RDF::URI("http://base/base#frag:2")], - "keyword" => ["@type", "@type"], - "unmapped" => ["foo", RDF::URI("http://base/basefoo")], - "relative" => ["foo/bar", RDF::URI("http://base/basefoo/bar")], - "dotseg" => ["../foo/bar", RDF::URI("http://base/base../foo/bar")], - "another abs IRI"=>["ex://foo", RDF::URI("ex://foo")], + "absolute IRI" => ["http://example.org/", RDF::URI("http://example.org/")], + "term" => ["ex", RDF::URI("http://example.org/")], + "prefix:suffix" => ["ex:suffix", RDF::URI("http://example.org/suffix")], + "#frag" => ["#frag", RDF::URI("http://base/base#frag")], + "#frag:2" => ["#frag:2", RDF::URI("http://base/base#frag:2")], + "keyword" => ["@type", "@type"], + "unmapped" => ["foo", RDF::URI("http://base/basefoo")], + "relative" => ["foo/bar", RDF::URI("http://base/basefoo/bar")], + "dotseg" => ["../foo/bar", RDF::URI("http://base/base../foo/bar")], + "another abs IRI" => ["ex://foo", RDF::URI("ex://foo")], "absolute IRI looking like a compact IRI" => - ["foo:bar", RDF::URI("foo:bar")], - "bnode" => ["_:t0", RDF::Node("t0")], - "_" => ["_", RDF::URI("http://underscore/")], + ["foo:bar", RDF::URI("foo:bar")], + "bnode" => ["_:t0", RDF::Node("t0")], + "_" => ["_", RDF::URI("http://underscore/")] }.each do |title, (input, result)| it title do expect(subject.expand_iri(input, vocab: true)).to produce(result, logger) @@ -1152,45 +1189,46 @@ def containers end it "expand-0110" do - ctx = JSON::LD::Context.parse({ + ctx = described_class.parse({ "@base" => "http://example.com/some/deep/directory/and/file/", "@vocab" => "/relative" }) - expect(ctx.expand_iri("#fragment-works", vocab: true)).to produce("http://example.com/relative#fragment-works", logger) + expect(ctx.expand_iri("#fragment-works", + vocab: 
true)).to produce("http://example.com/relative#fragment-works", logger) end end end end describe "#compact_iri" do - subject { + subject do c = context.parse({ - '@base' => 'http://base/', - "xsd" => "http://www.w3.org/2001/XMLSchema#", - 'ex' => 'http://example.org/', - '_' => 'http://underscore/', - 'rex' => {'@reverse' => "ex"}, - 'lex' => {'@id' => 'ex', '@language' => 'en'}, - 'tex' => {'@id' => 'ex', '@type' => 'xsd:string'}, - 'exp' => {'@id' => 'ex:pert'}, - 'experts' => {'@id' => 'ex:perts'}, + '@base' => 'http://base/', + "xsd" => "http://www.w3.org/2001/XMLSchema#", + 'ex' => 'http://example.org/', + '_' => 'http://underscore/', + 'rex' => { '@reverse' => "ex" }, + 'lex' => { '@id' => 'ex', '@language' => 'en' }, + 'tex' => { '@id' => 'ex', '@type' => 'xsd:string' }, + 'exp' => { '@id' => 'ex:pert' }, + 'experts' => { '@id' => 'ex:perts' } }) logger.clear c - } + end { "nil" => [nil, nil], - "absolute IRI" => ["http://example.com/", "http://example.com/"], - "prefix:suffix" => ["ex:suffix", "http://example.org/suffix"], - "unmapped" => ["foo", "foo"], - "bnode" => [JSON::LD::JsonLdError:: IRIConfusedWithPrefix, RDF::Node("a")], - "relative" => ["foo/bar", "http://base/foo/bar"], - "odd Compact IRI"=>["ex:perts", "http://example.org/perts"] + "absolute IRI" => ["http://example.com/", "http://example.com/"], + "prefix:suffix" => ["ex:suffix", "http://example.org/suffix"], + "unmapped" => %w[foo foo], + "bnode" => [JSON::LD::JsonLdError::IRIConfusedWithPrefix, RDF::Node("a")], + "relative" => ["foo/bar", "http://base/foo/bar"], + "odd Compact IRI" => ["ex:perts", "http://example.org/perts"] }.each do |title, (result, input)| it title do if result.is_a?(Class) - expect {subject.compact_iri(input)}.to raise_error(result) + expect { subject.compact_iri(input) }.to raise_error(result) else expect(subject.compact_iri(input)).to produce(result, logger) end @@ -1199,17 +1237,17 @@ def containers context "with :vocab option" do { - "absolute IRI" => 
["http://example.com/", "http://example.com/"], - "prefix:suffix" => ["ex:suffix", "http://example.org/suffix"], - "keyword" => ["@type", "@type"], - "unmapped" => ["foo", "foo"], - "bnode" => [JSON::LD::JsonLdError:: IRIConfusedWithPrefix, RDF::Node("a")], - "relative" => ["http://base/foo/bar", "http://base/foo/bar"], - "odd Compact IRI"=> ["experts", "http://example.org/perts"] + "absolute IRI" => ["http://example.com/", "http://example.com/"], + "prefix:suffix" => ["ex:suffix", "http://example.org/suffix"], + "keyword" => ["@type", "@type"], + "unmapped" => %w[foo foo], + "bnode" => [JSON::LD::JsonLdError::IRIConfusedWithPrefix, RDF::Node("a")], + "relative" => ["http://base/foo/bar", "http://base/foo/bar"], + "odd Compact IRI" => ["experts", "http://example.org/perts"] }.each do |title, (result, input)| it title do if result.is_a?(Class) - expect {subject.compact_iri(input, vocab: true)}.to raise_error(result) + expect { subject.compact_iri(input, vocab: true) }.to raise_error(result) else expect(subject.compact_iri(input, vocab: true)).to produce(result, logger) end @@ -1218,20 +1256,20 @@ def containers end context "with @vocab" do - before(:each) { subject.vocab = "http://example.org/"} + before { subject.vocab = "http://example.org/" } { - "absolute IRI" => ["http://example.com/", "http://example.com/"], - "prefix:suffix" => ["suffix", "http://example.org/suffix"], - "keyword" => ["@type", "@type"], - "unmapped" => ["foo", "foo"], - "bnode" => [JSON::LD::JsonLdError:: IRIConfusedWithPrefix, RDF::Node("a")], - "relative" => ["http://base/foo/bar", "http://base/foo/bar"], - "odd Compact IRI"=> ["experts", "http://example.org/perts"] + "absolute IRI" => ["http://example.com/", "http://example.com/"], + "prefix:suffix" => ["suffix", "http://example.org/suffix"], + "keyword" => ["@type", "@type"], + "unmapped" => %w[foo foo], + "bnode" => [JSON::LD::JsonLdError::IRIConfusedWithPrefix, RDF::Node("a")], + "relative" => ["http://base/foo/bar", 
"http://base/foo/bar"], + "odd Compact IRI" => ["experts", "http://example.org/perts"] }.each do |title, (result, input)| it title do if result.is_a?(Class) - expect {subject.compact_iri(input, vocab: true)}.to raise_error(result) + expect { subject.compact_iri(input, vocab: true) }.to raise_error(result) else expect(subject.compact_iri(input, vocab: true)).to produce(result, logger) end @@ -1241,28 +1279,28 @@ def containers it "does not use @vocab if it would collide with a term" do subject.set_mapping("name", "http://xmlns.com/foaf/0.1/name") subject.set_mapping("ex", nil) - expect(subject.compact_iri("http://example.org/name", vocab: true)). - not_to produce("name", logger) + expect(subject.compact_iri("http://example.org/name", vocab: true)) + .not_to produce("name", logger) end context "with @vocab: relative" do - before(:each) { + before do subject.vocab = nil subject.base = 'http://base/base' - } + end { - "absolute IRI" => ["http://example.com/", "http://example.com/"], - "prefix:suffix" => ["ex:suffix", "http://example.org/suffix"], - "keyword" => ["@type", "@type"], - "unmapped" => ["foo", "foo"], - "bnode" => [JSON::LD::JsonLdError:: IRIConfusedWithPrefix, RDF::Node("a")], - "relative" => ["http://base/foo/bar", "http://base/foo/bar"], - "odd Compact IRI"=> ["experts", "http://example.org/perts"] + "absolute IRI" => ["http://example.com/", "http://example.com/"], + "prefix:suffix" => ["ex:suffix", "http://example.org/suffix"], + "keyword" => ["@type", "@type"], + "unmapped" => %w[foo foo], + "bnode" => [JSON::LD::JsonLdError::IRIConfusedWithPrefix, RDF::Node("a")], + "relative" => ["http://base/foo/bar", "http://base/foo/bar"], + "odd Compact IRI" => ["experts", "http://example.org/perts"] }.each do |title, (result, input)| it title do if result.is_a?(Class) - expect {subject.compact_iri(input, vocab: true)}.to raise_error(result) + expect { subject.compact_iri(input, vocab: true) }.to raise_error(result) else expect(subject.compact_iri(input, vocab: 
true)).to produce(result, logger) end @@ -1276,38 +1314,39 @@ def containers c = subject.parse({ "xsd" => RDF::XSD.to_s, "plain" => "http://example.com/plain", - "lang" => {"@id" => "http://example.com/lang", "@language" => "en"}, - "dir" => {"@id" => "http://example.com/dir", "@direction" => "ltr"}, - "langdir" => {"@id" => "http://example.com/langdir", "@language" => "en", "@direction" => "ltr"}, - "bool" => {"@id" => "http://example.com/bool", "@type" => "xsd:boolean"}, - "integer" => {"@id" => "http://example.com/integer", "@type" => "xsd:integer"}, - "double" => {"@id" => "http://example.com/double", "@type" => "xsd:double"}, - "date" => {"@id" => "http://example.com/date", "@type" => "xsd:date"}, - "id" => {"@id" => "http://example.com/id", "@type" => "@id"}, - 'graph' => {'@id' => 'http://example.com/graph', '@container' => '@graph'}, - 'json' => {'@id' => 'http://example.com/json', '@type' => '@json'}, - - "list_plain" => {"@id" => "http://example.com/plain", "@container" => "@list"}, - "list_lang" => {"@id" => "http://example.com/lang", "@language" => "en", "@container" => "@list"}, - "list_bool" => {"@id" => "http://example.com/bool", "@type" => "xsd:boolean", "@container" => "@list"}, - "list_integer" => {"@id" => "http://example.com/integer", "@type" => "xsd:integer", "@container" => "@list"}, - "list_double" => {"@id" => "http://example.com/double", "@type" => "xsd:double", "@container" => "@list"}, - "list_date" => {"@id" => "http://example.com/date", "@type" => "xsd:date", "@container" => "@list"}, - "list_id" => {"@id" => "http://example.com/id", "@type" => "@id", "@container" => "@list"}, - "list_graph" => {"@id" => "http://example.com/graph", "@type" => "@id", "@container" => "@list"}, - - "set_plain" => {"@id" => "http://example.com/plain", "@container" => "@set"}, - "set_lang" => {"@id" => "http://example.com/lang", "@language" => "en", "@container" => "@set"}, - "set_bool" => {"@id" => "http://example.com/bool", "@type" => "xsd:boolean", 
"@container" => "@set"}, - "set_integer" => {"@id" => "http://example.com/integer", "@type" => "xsd:integer", "@container" => "@set"}, - "set_double" => {"@id" => "http://example.com/double", "@type" => "xsd:double", "@container" => "@set"}, - "set_date" => {"@id" => "http://example.com/date", "@type" => "xsd:date", "@container" => "@set"}, - "set_id" => {"@id" => "http://example.com/id", "@type" => "@id", "@container" => "@set"}, - 'set_graph' => {'@id' => 'http://example.com/graph', '@container' => ['@graph', '@set']}, - - "map_lang" => {"@id" => "http://example.com/lang", "@container" => "@language"}, - - "set_map_lang" => {"@id" => "http://example.com/lang", "@container" => ["@language", "@set"]}, + "lang" => { "@id" => "http://example.com/lang", "@language" => "en" }, + "dir" => { "@id" => "http://example.com/dir", "@direction" => "ltr" }, + "langdir" => { "@id" => "http://example.com/langdir", "@language" => "en", "@direction" => "ltr" }, + "bool" => { "@id" => "http://example.com/bool", "@type" => "xsd:boolean" }, + "integer" => { "@id" => "http://example.com/integer", "@type" => "xsd:integer" }, + "double" => { "@id" => "http://example.com/double", "@type" => "xsd:double" }, + "date" => { "@id" => "http://example.com/date", "@type" => "xsd:date" }, + "id" => { "@id" => "http://example.com/id", "@type" => "@id" }, + 'graph' => { '@id' => 'http://example.com/graph', '@container' => '@graph' }, + 'json' => { '@id' => 'http://example.com/json', '@type' => '@json' }, + + "list_plain" => { "@id" => "http://example.com/plain", "@container" => "@list" }, + "list_lang" => { "@id" => "http://example.com/lang", "@language" => "en", "@container" => "@list" }, + "list_bool" => { "@id" => "http://example.com/bool", "@type" => "xsd:boolean", "@container" => "@list" }, + "list_integer" => { "@id" => "http://example.com/integer", "@type" => "xsd:integer", + "@container" => "@list" }, + "list_double" => { "@id" => "http://example.com/double", "@type" => "xsd:double", 
"@container" => "@list" }, + "list_date" => { "@id" => "http://example.com/date", "@type" => "xsd:date", "@container" => "@list" }, + "list_id" => { "@id" => "http://example.com/id", "@type" => "@id", "@container" => "@list" }, + "list_graph" => { "@id" => "http://example.com/graph", "@type" => "@id", "@container" => "@list" }, + + "set_plain" => { "@id" => "http://example.com/plain", "@container" => "@set" }, + "set_lang" => { "@id" => "http://example.com/lang", "@language" => "en", "@container" => "@set" }, + "set_bool" => { "@id" => "http://example.com/bool", "@type" => "xsd:boolean", "@container" => "@set" }, + "set_integer" => { "@id" => "http://example.com/integer", "@type" => "xsd:integer", "@container" => "@set" }, + "set_double" => { "@id" => "http://example.com/double", "@type" => "xsd:double", "@container" => "@set" }, + "set_date" => { "@id" => "http://example.com/date", "@type" => "xsd:date", "@container" => "@set" }, + "set_id" => { "@id" => "http://example.com/id", "@type" => "@id", "@container" => "@set" }, + 'set_graph' => { '@id' => 'http://example.com/graph', '@container' => ['@graph', '@set'] }, + + "map_lang" => { "@id" => "http://example.com/lang", "@container" => "@language" }, + + "set_map_lang" => { "@id" => "http://example.com/lang", "@container" => ["@language", "@set"] } }) logger.clear c @@ -1315,21 +1354,21 @@ def containers # Prefered sets and maps over non sets or maps { - "set_plain" => [{"@value" => "foo"}], - "map_lang" => [{"@value" => "en", "@language" => "en"}], - "set_bool" => [{"@value" => "true", "@type" => "http://www.w3.org/2001/XMLSchema#boolean"}], - "set_integer" => [{"@value" => "1", "@type" => "http://www.w3.org/2001/XMLSchema#integer"}], - "set_id" => [{"@id" => "http://example.org/id"}], - "graph" => [{"@graph" => [{"@id" => "http://example.org/id"}]}], - 'json' => [{"@value" => {"some" => "json"}, "@type" => "@json"}], - 'dir' => [{"@value" => "dir", "@direction" => "ltr"}], - 'langdir' => [{"@value" => "lang dir", 
"@language" => "en", "@direction" => "ltr"}], + "set_plain" => [{ "@value" => "foo" }], + "map_lang" => [{ "@value" => "en", "@language" => "en" }], + "set_bool" => [{ "@value" => "true", "@type" => "http://www.w3.org/2001/XMLSchema#boolean" }], + "set_integer" => [{ "@value" => "1", "@type" => "http://www.w3.org/2001/XMLSchema#integer" }], + "set_id" => [{ "@id" => "http://example.org/id" }], + "graph" => [{ "@graph" => [{ "@id" => "http://example.org/id" }] }], + 'json' => [{ "@value" => { "some" => "json" }, "@type" => "@json" }], + 'dir' => [{ "@value" => "dir", "@direction" => "ltr" }], + 'langdir' => [{ "@value" => "lang dir", "@language" => "en", "@direction" => "ltr" }] }.each do |prop, values| context "uses #{prop}" do values.each do |value| it "for #{value.inspect}" do - expect(ctx.compact_iri("http://example.com/#{prop.sub(/^\w+_/, '')}", value: value, vocab: true)). - to produce(prop, logger) + expect(ctx.compact_iri("http://example.com/#{prop.sub(/^\w+_/, '')}", value: value, vocab: true)) + .to produce(prop, logger) end end end @@ -1338,28 +1377,29 @@ def containers # @language and @type with @list context "for @list" do { - "list_plain" => [ - [{"@value" => "foo"}], - [{"@value" => "foo"}, {"@value" => "bar"}, {"@value" => "baz"}], - [{"@value" => "foo"}, {"@value" => "bar"}, {"@value" => 1}], - [{"@value" => "foo"}, {"@value" => "bar"}, {"@value" => 1.1}], - [{"@value" => "foo"}, {"@value" => "bar"}, {"@value" => true}], - [{"@value" => "foo"}, {"@value" => "bar"}, {"@value" => 1}], - [{"@value" => "de", "@language" => "de"}, {"@value" => "jp", "@language" => "jp"}], - [{"@value" => true}], [{"@value" => false}], - [{"@value" => 1}], [{"@value" => 1.1}], + "list_plain" => [ + [{ "@value" => "foo" }], + [{ "@value" => "foo" }, { "@value" => "bar" }, { "@value" => "baz" }], + [{ "@value" => "foo" }, { "@value" => "bar" }, { "@value" => 1 }], + [{ "@value" => "foo" }, { "@value" => "bar" }, { "@value" => 1.1 }], + [{ "@value" => "foo" }, { "@value" => 
"bar" }, { "@value" => true }], + [{ "@value" => "foo" }, { "@value" => "bar" }, { "@value" => 1 }], + [{ "@value" => "de", "@language" => "de" }, { "@value" => "jp", "@language" => "jp" }], + [{ "@value" => true }], [{ "@value" => false }], + [{ "@value" => 1 }], [{ "@value" => 1.1 }] ], - "list_lang" => [[{"@value" => "en", "@language" => "en"}]], - "list_bool" => [[{"@value" => "true", "@type" => RDF::XSD.boolean.to_s}]], - "list_integer" => [[{"@value" => "1", "@type" => RDF::XSD.integer.to_s}]], - "list_double" => [[{"@value" => "1", "@type" => RDF::XSD.double.to_s}]], - "list_date" => [[{"@value" => "2012-04-17", "@type" => RDF::XSD.date.to_s}]], + "list_lang" => [[{ "@value" => "en", "@language" => "en" }]], + "list_bool" => [[{ "@value" => "true", "@type" => RDF::XSD.boolean.to_s }]], + "list_integer" => [[{ "@value" => "1", "@type" => RDF::XSD.integer.to_s }]], + "list_double" => [[{ "@value" => "1", "@type" => RDF::XSD.double.to_s }]], + "list_date" => [[{ "@value" => "2012-04-17", "@type" => RDF::XSD.date.to_s }]] }.each do |prop, values| context "uses #{prop}" do values.each do |value| - it "for #{{"@list" => value}.inspect}" do - expect(ctx.compact_iri("http://example.com/#{prop.sub(/^\w+_/, '')}", value: {"@list" => value}, vocab: true)). 
- to produce(prop, logger) + it "for #{{ '@list' => value }.inspect}" do + expect(ctx.compact_iri("http://example.com/#{prop.sub(/^\w+_/, '')}", value: { "@list" => value }, + vocab: true)) + .to produce(prop, logger) end end end @@ -1370,16 +1410,16 @@ def containers context "Compact IRI compaction" do { "nil" => [nil, nil], - "absolute IRI" => ["http://example.com/", "http://example.com/"], - "prefix:suffix" => ["ex:suffix", "http://example.org/suffix"], - "unmapped" => ["foo", "foo"], - "bnode" => [JSON::LD::JsonLdError:: IRIConfusedWithPrefix, RDF::Node("a")], - "relative" => ["foo/bar", "http://base/foo/bar"], - "odd Compact IRI"=> ["ex:perts", "http://example.org/perts"] + "absolute IRI" => ["http://example.com/", "http://example.com/"], + "prefix:suffix" => ["ex:suffix", "http://example.org/suffix"], + "unmapped" => %w[foo foo], + "bnode" => [JSON::LD::JsonLdError::IRIConfusedWithPrefix, RDF::Node("a")], + "relative" => ["foo/bar", "http://base/foo/bar"], + "odd Compact IRI" => ["ex:perts", "http://example.org/perts"] }.each do |title, (result, input)| it title do if result.is_a?(Class) - expect {subject.compact_iri(input)}.to raise_error(result) + expect { subject.compact_iri(input) }.to raise_error(result) else expect(subject.compact_iri(input)).to produce(result, logger) end @@ -1387,20 +1427,20 @@ def containers end context "and @vocab" do - before(:each) { subject.vocab = "http://example.org/"} + before { subject.vocab = "http://example.org/" } { - "absolute IRI" => ["http://example.com/", "http://example.com/"], - "prefix:suffix" => ["suffix", "http://example.org/suffix"], - "keyword" => ["@type", "@type"], - "unmapped" => ["foo", "foo"], - "bnode" => [JSON::LD::JsonLdError:: IRIConfusedWithPrefix, RDF::Node("a")], - "relative" => ["http://base/foo/bar", "http://base/foo/bar"], - "odd Compact IRI"=> ["experts", "http://example.org/perts"] + "absolute IRI" => ["http://example.com/", "http://example.com/"], + "prefix:suffix" => ["suffix", 
"http://example.org/suffix"], + "keyword" => ["@type", "@type"], + "unmapped" => %w[foo foo], + "bnode" => [JSON::LD::JsonLdError::IRIConfusedWithPrefix, RDF::Node("a")], + "relative" => ["http://base/foo/bar", "http://base/foo/bar"], + "odd Compact IRI" => ["experts", "http://example.org/perts"] }.each do |title, (result, input)| it title do if result.is_a?(Class) - expect {subject.compact_iri(input, vocab: true)}.to raise_error(result) + expect { subject.compact_iri(input, vocab: true) }.to raise_error(result) else expect(subject.compact_iri(input, vocab: true)).to produce(result, logger) end @@ -1411,7 +1451,7 @@ def containers context "compact-0018" do let(:ctx) do - subject.parse(JSON.parse %({ + subject.parse(JSON.parse(%({ "id1": "http://example.com/id1", "type1": "http://example.com/t1", "type2": "http://example.com/t2", @@ -1443,7 +1483,7 @@ def containers "@container": "@list", "@type": "type2" } - })) + }))) end { @@ -1455,7 +1495,7 @@ def containers '{ "@value": true}', '{ "@value": false}' ], - "term1" => %q({ + "term1" => '{ "@list": [ { "@value": "v1.1", "@language": "de" }, { "@value": "v1.2", "@language": "en" }, @@ -1464,8 +1504,8 @@ def containers { "@value": true}, { "@value": false} ] - }), - "term2" => %q({ + }', + "term2" => '{ "@list": [ { "@value": "v2.1", "@language": "en" }, { "@value": "v2.2", "@language": "en" }, @@ -1474,8 +1514,8 @@ def containers { "@value": "v2.5", "@language": "en" }, { "@value": "v2.6", "@language": "en" } ] - }), - "term3" => %q({ + }', + "term3" => '{ "@list": [ { "@value": "v3.1"}, { "@value": "v3.2"}, @@ -1484,8 +1524,8 @@ def containers { "@value": "v3.5"}, { "@value": "v3.6"} ] - }), - "term4" => %q({ + }', + "term4" => '{ "@list": [ { "@value": "v4.1", "@type": "http://example.com/t1" }, { "@value": "v4.2", "@type": "http://example.com/t1" }, @@ -1494,8 +1534,8 @@ def containers { "@value": "v4.5", "@type": "http://example.com/t1" }, { "@value": "v4.6", "@type": "http://example.com/t1" } ] - }), - "term5" 
=> %q({ + }', + "term5" => '{ "@list": [ { "@value": "v5.1", "@type": "http://example.com/t2" }, { "@value": "v5.2", "@type": "http://example.com/t2" }, @@ -1504,12 +1544,12 @@ def containers { "@value": "v5.5", "@type": "http://example.com/t2" }, { "@value": "v5.6", "@type": "http://example.com/t2" } ] - }), + }' }.each do |term, value| [value].flatten.each do |v| it "Uses #{term} for #{v}" do - expect(ctx.compact_iri("http://example.com/term", value: JSON.parse(v), vocab: true)). - to produce(term, logger) + expect(ctx.compact_iri("http://example.com/term", value: JSON.parse(v), vocab: true)) + .to produce(term, logger) end end end @@ -1519,19 +1559,21 @@ def containers let(:ctx) do subject.parse({ "ex" => "http://example.org/ns#", - "ex:property" => {"@container" => "@list"} + "ex:property" => { "@container" => "@list" } }) end + it "Compact @id that is a property IRI when @container is @list" do - expect(ctx.compact_iri("http://example.org/ns#property", vocab: false)). - to produce("ex:property", logger) + expect(ctx.compact_iri("http://example.org/ns#property", vocab: false)) + .to produce("ex:property", logger) end end context "compact-0041" do let(:ctx) do - subject.parse({"name" => {"@id" => "http://example.com/property", "@container" => "@list"}}) + subject.parse({ "name" => { "@id" => "http://example.com/property", "@container" => "@list" } }) end + it "Does not use @list with @index" do expect(ctx.compact_iri("http://example.com/property", value: { "@list" => ["one item"], @@ -1542,55 +1584,61 @@ def containers end describe "#expand_value" do - subject { + subject do ctx = context.parse({ "dc" => RDF::Vocab::DC.to_uri.to_s, "ex" => "http://example.org/", "foaf" => RDF::Vocab::FOAF.to_uri.to_s, "xsd" => "http://www.w3.org/2001/XMLSchema#", - "foaf:age" => {"@type" => "xsd:integer"}, - "foaf:knows" => {"@type" => "@id"}, - "dc:created" => {"@type" => "xsd:date"}, - "ex:integer" => {"@type" => "xsd:integer"}, - "ex:double" => {"@type" => "xsd:double"}, - 
"ex:boolean" => {"@type" => "xsd:boolean"}, - "ex:none" => {"@type" => "@none"}, - "ex:json" => {"@type" => "@json"} + "foaf:age" => { "@type" => "xsd:integer" }, + "foaf:knows" => { "@type" => "@id" }, + "dc:created" => { "@type" => "xsd:date" }, + "ex:integer" => { "@type" => "xsd:integer" }, + "ex:double" => { "@type" => "xsd:double" }, + "ex:boolean" => { "@type" => "xsd:boolean" }, + "ex:none" => { "@type" => "@none" }, + "ex:json" => { "@type" => "@json" } }) logger.clear ctx - } + end - %w(boolean integer string dateTime date time).each do |dt| + %w[boolean integer string dateTime date time].each do |dt| it "expands datatype xsd:#{dt}" do - expect(subject.expand_value("foo", RDF::XSD[dt])).to produce({"@id" => "http://www.w3.org/2001/XMLSchema##{dt}"}, logger) + expect(subject.expand_value("foo", + RDF::XSD[dt])).to produce({ "@id" => "http://www.w3.org/2001/XMLSchema##{dt}" }, logger) end end { - "absolute IRI" => ["foaf:knows", "http://example.com/", {"@id" => "http://example.com/"}], - "term" => ["foaf:knows", "ex", {"@id" => "ex"}], - "prefix:suffix" => ["foaf:knows", "ex:suffix", {"@id" => "http://example.org/suffix"}], - "no IRI" => ["foo", "http://example.com/", {"@value" => "http://example.com/"}], - "no term" => ["foo", "ex", {"@value" => "ex"}], - "no prefix" => ["foo", "ex:suffix", {"@value" => "ex:suffix"}], - "integer" => ["foaf:age", "54", {"@value" => "54", "@type" => RDF::XSD.integer.to_s}], - "date " => ["dc:created", "2011-12-27Z", {"@value" => "2011-12-27Z", "@type" => RDF::XSD.date.to_s}], - "native boolean" => ["foo", true, {"@value" => true}], - "native integer" => ["foo", 1, {"@value" => 1}], - "native double" => ["foo", 1.1e1, {"@value" => 1.1E1}], - "native date" => ["foo", Date.parse("2011-12-27"), {"@value" => "2011-12-27", "@type" => RDF::XSD.date.to_s}], - "native dateTime" =>["foo", DateTime.parse("2011-12-27T10:11:12Z"), {"@value" => "2011-12-27T10:11:12Z", "@type" => RDF::XSD.dateTime.to_s}], - "ex:none string" => ["ex:none", 
"foo", {"@value" => "foo"}], - "ex:none boolean" =>["ex:none", true, {"@value" => true}], - "ex:none integer" =>["ex:none", 1, {"@value" => 1}], - "ex:none double" => ["ex:none", 1.1e1, {"@value" => 1.1E1}], - "ex:json string" => ["ex:json", "foo", {"@value" => "foo", "@type" => "@json"}], - "ex:json boolean" =>["ex:json", true, {"@value" => true, "@type" => "@json"}], - "ex:json integer" =>["ex:json", 1, {"@value" => 1, "@type" => "@json"}], - "ex:json double" => ["ex:json", 1.1e1, {"@value" => 1.1e1, "@type" => "@json"}], - "ex:json object" => ["ex:json", {"foo" => "bar"}, {"@value" => {"foo" => "bar"}, "@type" => "@json"}], - "ex:json array" => ["ex:json", [{"foo" => "bar"}], {"@value" => [{"foo" => "bar"}], "@type" => "@json"}], + "absolute IRI" => ["foaf:knows", "http://example.com/", { "@id" => "http://example.com/" }], + "term" => ["foaf:knows", "ex", { "@id" => "ex" }], + "prefix:suffix" => ["foaf:knows", "ex:suffix", { "@id" => "http://example.org/suffix" }], + "no IRI" => ["foo", "http://example.com/", { "@value" => "http://example.com/" }], + "no term" => ["foo", "ex", { "@value" => "ex" }], + "no prefix" => ["foo", "ex:suffix", { "@value" => "ex:suffix" }], + "integer" => ["foaf:age", "54", { "@value" => "54", "@type" => RDF::XSD.integer.to_s }], + "date " => ["dc:created", "2011-12-27Z", + { "@value" => "2011-12-27Z", "@type" => RDF::XSD.date.to_s }], + "native boolean" => ["foo", true, { "@value" => true }], + "native integer" => ["foo", 1, { "@value" => 1 }], + "native double" => ["foo", 1.1e1, { "@value" => 1.1E1 }], + "native date" => ["foo", Date.parse("2011-12-27"), + { "@value" => "2011-12-27", "@type" => RDF::XSD.date.to_s }], + "native dateTime" => ["foo", DateTime.parse("2011-12-27T10:11:12Z"), + { "@value" => "2011-12-27T10:11:12Z", "@type" => RDF::XSD.dateTime.to_s }], + "ex:none string" => ["ex:none", "foo", { "@value" => "foo" }], + "ex:none boolean" => ["ex:none", true, { "@value" => true }], + "ex:none integer" => ["ex:none", 1, { 
"@value" => 1 }], + "ex:none double" => ["ex:none", 1.1e1, { "@value" => 1.1E1 }], + "ex:json string" => ["ex:json", "foo", { "@value" => "foo", "@type" => "@json" }], + "ex:json boolean" => ["ex:json", true, { "@value" => true, "@type" => "@json" }], + "ex:json integer" => ["ex:json", 1, { "@value" => 1, "@type" => "@json" }], + "ex:json double" => ["ex:json", 1.1e1, { "@value" => 1.1e1, "@type" => "@json" }], + "ex:json object" => ["ex:json", { "foo" => "bar" }, + { "@value" => { "foo" => "bar" }, "@type" => "@json" }], + "ex:json array" => ["ex:json", [{ "foo" => "bar" }], + { "@value" => [{ "foo" => "bar" }], "@type" => "@json" }] }.each do |title, (key, compacted, expanded)| it title do expect(subject.expand_value(key, compacted)).to produce(expanded, logger) @@ -1598,14 +1646,16 @@ def containers end context "@language" do - before(:each) {subject.default_language = "en"} + before { subject.default_language = "en" } + { - "no IRI" => ["foo", "http://example.com/", {"@value" => "http://example.com/", "@language" => "en"}], - "no term" => ["foo", "ex", {"@value" => "ex", "@language" => "en"}], - "no prefix" => ["foo", "ex:suffix", {"@value" => "ex:suffix", "@language" => "en"}], - "native boolean" => ["foo", true, {"@value" => true}], - "native integer" => ["foo", 1, {"@value" => 1}], - "native double" => ["foo", 1.1, {"@value" => 1.1}], + "no IRI" => ["foo", "http://example.com/", + { "@value" => "http://example.com/", "@language" => "en" }], + "no term" => ["foo", "ex", { "@value" => "ex", "@language" => "en" }], + "no prefix" => ["foo", "ex:suffix", { "@value" => "ex:suffix", "@language" => "en" }], + "native boolean" => ["foo", true, { "@value" => true }], + "native integer" => ["foo", 1, { "@value" => 1 }], + "native double" => ["foo", 1.1, { "@value" => 1.1 }] }.each do |title, (key, compacted, expanded)| it title do expect(subject.expand_value(key, compacted)).to produce(expanded, logger) @@ -1614,25 +1664,26 @@ def containers end context "coercion" do - 
before(:each) {subject.default_language = "en"} + before { subject.default_language = "en" } + { - "boolean-boolean" => ["ex:boolean", true, {"@value" => true, "@type" => RDF::XSD.boolean.to_s}], - "boolean-integer" => ["ex:integer", true, {"@value" => true, "@type" => RDF::XSD.integer.to_s}], - "boolean-double" => ["ex:double", true, {"@value" => true, "@type" => RDF::XSD.double.to_s}], - "boolean-json" => ["ex:json", true, {"@value" => true, "@type" => '@json'}], - "double-boolean" => ["ex:boolean", 1.1, {"@value" => 1.1, "@type" => RDF::XSD.boolean.to_s}], - "double-double" => ["ex:double", 1.1, {"@value" => 1.1, "@type" => RDF::XSD.double.to_s}], - "double-integer" => ["foaf:age", 1.1, {"@value" => 1.1, "@type" => RDF::XSD.integer.to_s}], - "double-json" => ["ex:json", 1.1, {"@value" => 1.1, "@type" => '@json'}], - "json-json" => ["ex:json", {"foo" => "bar"}, {"@value" => {"foo" => "bar"}, "@type" => '@json'}], - "integer-boolean" => ["ex:boolean", 1, {"@value" => 1, "@type" => RDF::XSD.boolean.to_s}], - "integer-double" => ["ex:double", 1, {"@value" => 1, "@type" => RDF::XSD.double.to_s}], - "integer-integer" => ["foaf:age", 1, {"@value" => 1, "@type" => RDF::XSD.integer.to_s}], - "integer-json" => ["ex:json", 1, {"@value" => 1, "@type" => '@json'}], - "string-boolean" => ["ex:boolean", "foo", {"@value" => "foo", "@type" => RDF::XSD.boolean.to_s}], - "string-double" => ["ex:double", "foo", {"@value" => "foo", "@type" => RDF::XSD.double.to_s}], - "string-integer" => ["foaf:age", "foo", {"@value" => "foo", "@type" => RDF::XSD.integer.to_s}], - "string-json" => ["ex:json", "foo", {"@value" => "foo", "@type" => '@json'}], + "boolean-boolean" => ["ex:boolean", true, { "@value" => true, "@type" => RDF::XSD.boolean.to_s }], + "boolean-integer" => ["ex:integer", true, { "@value" => true, "@type" => RDF::XSD.integer.to_s }], + "boolean-double" => ["ex:double", true, { "@value" => true, "@type" => RDF::XSD.double.to_s }], + "boolean-json" => ["ex:json", true, { "@value" 
=> true, "@type" => '@json' }], + "double-boolean" => ["ex:boolean", 1.1, { "@value" => 1.1, "@type" => RDF::XSD.boolean.to_s }], + "double-double" => ["ex:double", 1.1, { "@value" => 1.1, "@type" => RDF::XSD.double.to_s }], + "double-integer" => ["foaf:age", 1.1, { "@value" => 1.1, "@type" => RDF::XSD.integer.to_s }], + "double-json" => ["ex:json", 1.1, { "@value" => 1.1, "@type" => '@json' }], + "json-json" => ["ex:json", { "foo" => "bar" }, { "@value" => { "foo" => "bar" }, "@type" => '@json' }], + "integer-boolean" => ["ex:boolean", 1, { "@value" => 1, "@type" => RDF::XSD.boolean.to_s }], + "integer-double" => ["ex:double", 1, { "@value" => 1, "@type" => RDF::XSD.double.to_s }], + "integer-integer" => ["foaf:age", 1, { "@value" => 1, "@type" => RDF::XSD.integer.to_s }], + "integer-json" => ["ex:json", 1, { "@value" => 1, "@type" => '@json' }], + "string-boolean" => ["ex:boolean", "foo", { "@value" => "foo", "@type" => RDF::XSD.boolean.to_s }], + "string-double" => ["ex:double", "foo", { "@value" => "foo", "@type" => RDF::XSD.double.to_s }], + "string-integer" => ["foaf:age", "foo", { "@value" => "foo", "@type" => RDF::XSD.integer.to_s }], + "string-json" => ["ex:json", "foo", { "@value" => "foo", "@type" => '@json' }] }.each do |title, (key, compacted, expanded)| it title do expect(subject.expand_value(key, compacted)).to produce(expanded, logger) @@ -1642,44 +1693,51 @@ def containers end describe "#compact_value" do + subject { ctx } + let(:ctx) do c = context.parse({ - "dc" => RDF::Vocab::DC.to_uri.to_s, - "ex" => "http://example.org/", - "foaf" => RDF::Vocab::FOAF.to_uri.to_s, - "xsd" => RDF::XSD.to_s, - "langmap" => {"@id" => "http://example.com/langmap", "@container" => "@language"}, - "list" => {"@id" => "http://example.org/list", "@container" => "@list"}, - "nolang" => {"@id" => "http://example.org/nolang", "@language" => nil}, - "dc:created" => {"@type" => RDF::XSD.date.to_s}, - "foaf:age" => {"@type" => RDF::XSD.integer.to_s}, - "foaf:knows" => 
{"@type" => "@id"}, - "ex:none" => {"@type" => "@none"}, + "dc" => RDF::Vocab::DC.to_uri.to_s, + "ex" => "http://example.org/", + "foaf" => RDF::Vocab::FOAF.to_uri.to_s, + "xsd" => RDF::XSD.to_s, + "langmap" => { "@id" => "http://example.com/langmap", "@container" => "@language" }, + "list" => { "@id" => "http://example.org/list", "@container" => "@list" }, + "nolang" => { "@id" => "http://example.org/nolang", "@language" => nil }, + "dc:created" => { "@type" => RDF::XSD.date.to_s }, + "foaf:age" => { "@type" => RDF::XSD.integer.to_s }, + "foaf:knows" => { "@type" => "@id" }, + "ex:none" => { "@type" => "@none" } }) logger.clear c end - subject {ctx} { - "absolute IRI" => ["foaf:knows", "http://example.com/", {"@id" => "http://example.com/"}], - "prefix:suffix" => ["foaf:knows", "ex:suffix", {"@id" => "http://example.org/suffix"}], - "integer" => ["foaf:age", "54", {"@value" => "54", "@type" => RDF::XSD.integer.to_s}], - "date " => ["dc:created", "2011-12-27Z", {"@value" => "2011-12-27Z", "@type" => RDF::XSD.date.to_s}], - "no IRI" => ["foo", {"@id" =>"http://example.com/"},{"@id" => "http://example.com/"}], - "no IRI (Compact IRI)" => ["foo", {"@id" => RDF::Vocab::FOAF.Person.to_s}, {"@id" => RDF::Vocab::FOAF.Person.to_s}], - "no boolean" => ["foo", {"@value" => "true", "@type" => "xsd:boolean"},{"@value" => "true", "@type" => RDF::XSD.boolean.to_s}], - "no integer" => ["foo", {"@value" => "54", "@type" => "xsd:integer"},{"@value" => "54", "@type" => RDF::XSD.integer.to_s}], - "no date " => ["foo", {"@value" => "2011-12-27Z", "@type" => "xsd:date"}, {"@value" => "2011-12-27Z", "@type" => RDF::XSD.date.to_s}], - "no string " => ["foo", "string", {"@value" => "string"}], - "no lang " => ["nolang", "string", {"@value" => "string"}], - "native boolean" => ["foo", true, {"@value" => true}], - "native integer" => ["foo", 1, {"@value" => 1}], - "native integer(list)"=>["list", 1, {"@value" => 1}], - "native double" => ["foo", 1.1e1, {"@value" => 1.1E1}], - "ex:none IRI" 
=> ["ex:none", {"@id" => "http://example.com/"}, {"@id" => "http://example.com/"}], - "ex:none string" => ["ex:none", {"@value" => "string"}, {"@value" => "string"}], - "ex:none integer" =>["ex:none", {"@value" => "54", "@type" => "xsd:integer"}, {"@value" => "54", "@type" => RDF::XSD.integer.to_s}], + "absolute IRI" => ["foaf:knows", "http://example.com/", { "@id" => "http://example.com/" }], + "prefix:suffix" => ["foaf:knows", "ex:suffix", { "@id" => "http://example.org/suffix" }], + "integer" => ["foaf:age", "54", { "@value" => "54", "@type" => RDF::XSD.integer.to_s }], + "date " => ["dc:created", "2011-12-27Z", + { "@value" => "2011-12-27Z", "@type" => RDF::XSD.date.to_s }], + "no IRI" => ["foo", { "@id" => "http://example.com/" }, { "@id" => "http://example.com/" }], + "no IRI (Compact IRI)" => ["foo", { "@id" => RDF::Vocab::FOAF.Person.to_s }, + { "@id" => RDF::Vocab::FOAF.Person.to_s }], + "no boolean" => ["foo", { "@value" => "true", "@type" => "xsd:boolean" }, + { "@value" => "true", "@type" => RDF::XSD.boolean.to_s }], + "no integer" => ["foo", { "@value" => "54", "@type" => "xsd:integer" }, + { "@value" => "54", "@type" => RDF::XSD.integer.to_s }], + "no date " => ["foo", { "@value" => "2011-12-27Z", "@type" => "xsd:date" }, + { "@value" => "2011-12-27Z", "@type" => RDF::XSD.date.to_s }], + "no string " => ["foo", "string", { "@value" => "string" }], + "no lang " => ["nolang", "string", { "@value" => "string" }], + "native boolean" => ["foo", true, { "@value" => true }], + "native integer" => ["foo", 1, { "@value" => 1 }], + "native integer(list)" => ["list", 1, { "@value" => 1 }], + "native double" => ["foo", 1.1e1, { "@value" => 1.1E1 }], + "ex:none IRI" => ["ex:none", { "@id" => "http://example.com/" }, { "@id" => "http://example.com/" }], + "ex:none string" => ["ex:none", { "@value" => "string" }, { "@value" => "string" }], + "ex:none integer" => ["ex:none", { "@value" => "54", "@type" => "xsd:integer" }, + { "@value" => "54", "@type" => 
RDF::XSD.integer.to_s }] }.each do |title, (key, compacted, expanded)| it title do expect(subject.compact_value(key, expanded)).to produce(compacted, logger) @@ -1688,24 +1746,39 @@ def containers context "@language" do { - "@id" => ["foo", {"@id" => "foo"}, {"@id" => "foo"}], - "integer" => ["foo", {"@value" => "54", "@type" => "xsd:integer"}, {"@value" => "54", "@type" => RDF::XSD.integer.to_s}], - "date" => ["foo", {"@value" => "2011-12-27Z","@type" => "xsd:date"},{"@value" => "2011-12-27Z", "@type" => RDF::XSD.date.to_s}], - "no lang" => ["foo", {"@value" => "foo" }, {"@value" => "foo"}], - "same lang" => ["foo", "foo", {"@value" => "foo", "@language" => "en"}], - "other lang" => ["foo", {"@value" => "foo", "@language" => "bar"}, {"@value" => "foo", "@language" => "bar"}], - "langmap" => ["langmap", "en", {"@value" => "en", "@language" => "en"}], - "no lang with @type coercion" => ["dc:created", {"@value" => "foo"}, {"@value" => "foo"}], - "no lang with @id coercion" => ["foaf:knows", {"@value" => "foo"}, {"@value" => "foo"}], - "no lang with @language=null" => ["nolang", "string", {"@value" => "string"}], - "same lang with @type coercion" => ["dc:created", {"@value" => "foo"}, {"@value" => "foo"}], - "same lang with @id coercion" => ["foaf:knows", {"@value" => "foo"}, {"@value" => "foo"}], - "other lang with @type coercion" => ["dc:created", {"@value" => "foo", "@language" => "bar"}, {"@value" => "foo", "@language" => "bar"}], - "other lang with @id coercion" => ["foaf:knows", {"@value" => "foo", "@language" => "bar"}, {"@value" => "foo", "@language" => "bar"}], - "native boolean" => ["foo", true, {"@value" => true}], - "native integer" => ["foo", 1, {"@value" => 1}], - "native integer(list)" => ["list", 1, {"@value" => 1}], - "native double" => ["foo", 1.1e1, {"@value" => 1.1E1}], + "@id" => ["foo", { "@id" => "foo" }, { "@id" => "foo" }], + "integer" => ["foo", { "@value" => "54", "@type" => "xsd:integer" }, + { "@value" => "54", "@type" => 
RDF::XSD.integer.to_s }], + "date" => ["foo", { "@value" => "2011-12-27Z", "@type" => "xsd:date" }, + { "@value" => "2011-12-27Z", "@type" => RDF::XSD.date.to_s }], + "no lang" => ["foo", { "@value" => "foo" }, + { "@value" => "foo" }], + "same lang" => ["foo", "foo", + { "@value" => "foo", "@language" => "en" }], + "other lang" => ["foo", { "@value" => "foo", "@language" => "bar" }, + { "@value" => "foo", "@language" => "bar" }], + "langmap" => ["langmap", "en", + { "@value" => "en", "@language" => "en" }], + "no lang with @type coercion" => ["dc:created", { "@value" => "foo" }, + { "@value" => "foo" }], + "no lang with @id coercion" => ["foaf:knows", { "@value" => "foo" }, + { "@value" => "foo" }], + "no lang with @language=null" => ["nolang", "string", + { "@value" => "string" }], + "same lang with @type coercion" => ["dc:created", { "@value" => "foo" }, + { "@value" => "foo" }], + "same lang with @id coercion" => ["foaf:knows", { "@value" => "foo" }, + { "@value" => "foo" }], + "other lang with @type coercion" => ["dc:created", { "@value" => "foo", "@language" => "bar" }, + { "@value" => "foo", "@language" => "bar" }], + "other lang with @id coercion" => ["foaf:knows", { "@value" => "foo", "@language" => "bar" }, + { "@value" => "foo", "@language" => "bar" }], + "native boolean" => ["foo", true, + { "@value" => true }], + "native integer" => ["foo", 1, { "@value" => 1 }], + "native integer(list)" => ["list", 1, { "@value" => 1 }], + "native double" => ["foo", 1.1e1, + { "@value" => 1.1E1 }] }.each do |title, (key, compacted, expanded)| it title do subject.default_language = "en" @@ -1715,7 +1788,7 @@ def containers end context "keywords" do - before(:each) do + before do subject.set_mapping("id", "@id") subject.set_mapping("type", "@type") subject.set_mapping("list", "@list") @@ -1725,10 +1798,10 @@ def containers end { - "@id" => [{"id" => "http://example.com/"}, {"@id" => "http://example.com/"}], - "@type" => [{"literal" => "foo", "type" => 
"http://example.com/"}, - {"@value" => "foo", "@type" => "http://example.com/"}], - "@value" => [{"literal" => "foo", "language" => "bar"}, {"@value" => "foo", "@language" => "bar"}], + "@id" => [{ "id" => "http://example.com/" }, { "@id" => "http://example.com/" }], + "@type" => [{ "literal" => "foo", "type" => "http://example.com/" }, + { "@value" => "foo", "@type" => "http://example.com/" }], + "@value" => [{ "literal" => "foo", "language" => "bar" }, { "@value" => "foo", "@language" => "bar" }] }.each do |title, (compacted, expanded)| it title do expect(subject.compact_value("foo", expanded)).to produce(compacted, logger) @@ -1742,49 +1815,49 @@ def containers end describe "#container" do - subject { + subject do ctx = context.parse({ - "ex" => "http://example.org/", - "graph" => {"@id" => "ex:graph", "@container" => "@graph"}, - "graphSet" => {"@id" => "ex:graphSet", "@container" => ["@graph", "@set"]}, - "graphId" => {"@id" => "ex:graphSet", "@container" => ["@graph", "@id"]}, - "graphIdSet" => {"@id" => "ex:graphSet", "@container" => ["@graph", "@id", "@set"]}, - "graphNdx" => {"@id" => "ex:graphSet", "@container" => ["@graph", "@index"]}, - "graphNdxSet" => {"@id" => "ex:graphSet", "@container" => ["@graph", "@index", "@set"]}, - "id" => {"@id" => "ex:idSet", "@container" => "@id"}, - "idSet" => {"@id" => "ex:id", "@container" => ["@id", "@set"]}, - "language" => {"@id" => "ex:language", "@container" => "@language"}, - "langSet" => {"@id" => "ex:languageSet", "@container" => ["@language", "@set"]}, - "list" => {"@id" => "ex:list", "@container" => "@list"}, - "ndx" => {"@id" => "ex:ndx", "@container" => "@index"}, - "ndxSet" => {"@id" => "ex:ndxSet", "@container" => ["@index", "@set"]}, - "set" => {"@id" => "ex:set", "@container" => "@set"}, - "type" => {"@id" => "ex:type", "@container" => "@type"}, - "typeSet" => {"@id" => "ex:typeSet", "@container" => ["@type", "@set"]}, + "ex" => "http://example.org/", + "graph" => { "@id" => "ex:graph", "@container" => 
"@graph" }, + "graphSet" => { "@id" => "ex:graphSet", "@container" => ["@graph", "@set"] }, + "graphId" => { "@id" => "ex:graphSet", "@container" => ["@graph", "@id"] }, + "graphIdSet" => { "@id" => "ex:graphSet", "@container" => ["@graph", "@id", "@set"] }, + "graphNdx" => { "@id" => "ex:graphSet", "@container" => ["@graph", "@index"] }, + "graphNdxSet" => { "@id" => "ex:graphSet", "@container" => ["@graph", "@index", "@set"] }, + "id" => { "@id" => "ex:idSet", "@container" => "@id" }, + "idSet" => { "@id" => "ex:id", "@container" => ["@id", "@set"] }, + "language" => { "@id" => "ex:language", "@container" => "@language" }, + "langSet" => { "@id" => "ex:languageSet", "@container" => ["@language", "@set"] }, + "list" => { "@id" => "ex:list", "@container" => "@list" }, + "ndx" => { "@id" => "ex:ndx", "@container" => "@index" }, + "ndxSet" => { "@id" => "ex:ndxSet", "@container" => ["@index", "@set"] }, + "set" => { "@id" => "ex:set", "@container" => "@set" }, + "type" => { "@id" => "ex:type", "@container" => "@type" }, + "typeSet" => { "@id" => "ex:typeSet", "@container" => ["@type", "@set"] } }) logger.clear ctx - } + end it "uses TermDefinition" do { - "ex" => Set.new, - "graph" => Set["@graph"], - "graphSet" => Set["@graph"], - "graphId" => Set["@graph", "@id"], - "graphIdSet" => Set["@graph", "@id"], - "graphNdx" => Set["@graph", "@index"], + "ex" => Set.new, + "graph" => Set["@graph"], + "graphSet" => Set["@graph"], + "graphId" => Set["@graph", "@id"], + "graphIdSet" => Set["@graph", "@id"], + "graphNdx" => Set["@graph", "@index"], "graphNdxSet" => Set["@graph", "@index"], - "id" => Set['@id'], - "idSet" => Set['@id'], - "language" => Set['@language'], - "langSet" => Set['@language'], - "list" => Set['@list'], - "ndx" => Set['@index'], - "ndxSet" => Set['@index'], - "set" => Set.new, - "type" => Set['@type'], - "typeSet" => Set['@type'], + "id" => Set['@id'], + "idSet" => Set['@id'], + "language" => Set['@language'], + "langSet" => Set['@language'], + "list" => 
Set['@list'], + "ndx" => Set['@index'], + "ndxSet" => Set['@index'], + "set" => Set.new, + "type" => Set['@type'], + "typeSet" => Set['@type'] }.each do |defn, container| expect(subject.container(subject.term_definitions[defn])).to eq container end @@ -1792,23 +1865,23 @@ def containers it "#as_array" do { - "ex" => false, - "graph" => false, - "graphSet" => true, - "graphId" => false, - "graphIdSet" => true, - "graphNdx" => false, + "ex" => false, + "graph" => false, + "graphSet" => true, + "graphId" => false, + "graphIdSet" => true, + "graphNdx" => false, "graphNdxSet" => true, - "id" => false, - "idSet" => true, - "language" => false, - "langSet" => true, - "list" => true, - "ndx" => false, - "ndxSet" => true, - "set" => true, - "type" => false, - "typeSet" => true, + "id" => false, + "idSet" => true, + "language" => false, + "langSet" => true, + "list" => true, + "ndx" => false, + "ndxSet" => true, + "set" => true, + "type" => false, + "typeSet" => true }.each do |defn, as_array| expect(subject.as_array?(subject.term_definitions[defn])).to eq as_array end @@ -1816,23 +1889,23 @@ def containers it "uses array" do { - "ex" => Set.new, - "graph" => Set["@graph"], - "graphSet" => Set["@graph"], - "graphId" => Set["@graph", "@id"], - "graphIdSet" => Set["@graph", "@id"], - "graphNdx" => Set["@graph", "@index"], + "ex" => Set.new, + "graph" => Set["@graph"], + "graphSet" => Set["@graph"], + "graphId" => Set["@graph", "@id"], + "graphIdSet" => Set["@graph", "@id"], + "graphNdx" => Set["@graph", "@index"], "graphNdxSet" => Set["@graph", "@index"], - "id" => Set['@id'], - "idSet" => Set['@id'], - "language" => Set['@language'], - "langSet" => Set['@language'], - "list" => Set['@list'], - "ndx" => Set['@index'], - "ndxSet" => Set['@index'], - "set" => Set.new, - "type" => Set['@type'], - "typeSet" => Set['@type'], + "id" => Set['@id'], + "idSet" => Set['@id'], + "language" => Set['@language'], + "langSet" => Set['@language'], + "list" => Set['@list'], + "ndx" => 
Set['@index'], + "ndxSet" => Set['@index'], + "set" => Set.new, + "type" => Set['@type'], + "typeSet" => Set['@type'] }.each do |defn, container| expect(subject.container(defn)).to eq container end @@ -1840,15 +1913,16 @@ def containers end describe "#language" do - subject { + subject do ctx = context.parse({ "ex" => "http://example.org/", - "nil" => {"@id" => "ex:nil", "@language" => nil}, - "en" => {"@id" => "ex:en", "@language" => "en"}, + "nil" => { "@id" => "ex:nil", "@language" => nil }, + "en" => { "@id" => "ex:en", "@language" => "en" } }) logger.clear ctx - } + end + it "uses TermDefinition" do expect(subject.language(subject.term_definitions['ex'])).to be_falsey expect(subject.language(subject.term_definitions['nil'])).to be_falsey @@ -1863,43 +1937,44 @@ def containers end describe "#reverse?" do - subject { + subject do ctx = context.parse({ "ex" => "http://example.org/", - "reverse" => {"@reverse" => "ex:reverse"}, + "reverse" => { "@reverse" => "ex:reverse" } }) logger.clear ctx - } + end + it "uses TermDefinition" do - expect(subject.reverse?(subject.term_definitions['ex'])).to be_falsey - expect(subject.reverse?(subject.term_definitions['reverse'])).to be_truthy + expect(subject).not_to be_reverse(subject.term_definitions['ex']) + expect(subject).to be_reverse(subject.term_definitions['reverse']) end it "uses string" do - expect(subject.reverse?('ex')).to be_falsey - expect(subject.reverse?('reverse')).to be_truthy + expect(subject).not_to be_reverse('ex') + expect(subject).to be_reverse('reverse') end end describe "#nest" do - subject { + subject do ctx = context.parse({ - "ex" => "http://example.org/", - "nest" => {"@id" => "ex:nest", "@nest" => "@nest"}, - "nest2" => {"@id" => "ex:nest2", "@nest" => "nest-alias"}, - "nest-alias" => "@nest" + "ex" => "http://example.org/", + "nest" => { "@id" => "ex:nest", "@nest" => "@nest" }, + "nest2" => { "@id" => "ex:nest2", "@nest" => "nest-alias" }, + "nest-alias" => "@nest" }) logger.clear ctx - } + end 
it "uses term" do { - "ex" => nil, - "nest" => "@nest", - "nest2" => "nest-alias", - "nest-alias" => nil, + "ex" => nil, + "nest" => "@nest", + "nest2" => "nest-alias", + "nest-alias" => nil }.each do |defn, nest| expect(subject.nest(defn)).to eq nest end @@ -1907,28 +1982,29 @@ def containers context "detects error" do it "does not allow a keyword other than @nest for the value of @nest" do - expect { - context.parse({"no-keyword-nest" => {"@id" => "http://example/f", "@nest" => "@id"}}) - }.to raise_error JSON::LD::JsonLdError::InvalidNestValue + expect do + context.parse({ "no-keyword-nest" => { "@id" => "http://example/f", "@nest" => "@id" } }) + end.to raise_error JSON::LD::JsonLdError::InvalidNestValue end it "does not allow @nest with @reverse" do - expect { - context.parse({"no-reverse-nest" => {"@reverse" => "http://example/f", "@nest" => "@nest"}}) - }.to raise_error JSON::LD::JsonLdError::InvalidReverseProperty + expect do + context.parse({ "no-reverse-nest" => { "@reverse" => "http://example/f", "@nest" => "@nest" } }) + end.to raise_error JSON::LD::JsonLdError::InvalidReverseProperty end end end describe "#reverse_term" do - subject { + subject do ctx = context.parse({ "ex" => "http://example.org/", - "reverse" => {"@reverse" => "ex"}, + "reverse" => { "@reverse" => "ex" } }) logger.clear ctx - } + end + it "uses TermDefinition" do expect(subject.reverse_term(subject.term_definitions['ex'])).to eql subject.term_definitions['reverse'] expect(subject.reverse_term(subject.term_definitions['reverse'])).to eql subject.term_definitions['ex'] @@ -1943,8 +2019,8 @@ def containers describe "protected contexts" do it "seals a term with @protected true" do ctx = context.parse({ - "protected" => {"@id" => "http://example.com/protected", "@protected" => true}, - "unprotected" => {"@id" => "http://example.com/unprotected"}, + "protected" => { "@id" => "http://example.com/protected", "@protected" => true }, + "unprotected" => { "@id" => 
"http://example.com/unprotected" } }) expect(ctx.term_definitions["protected"]).to be_protected expect(ctx.term_definitions["unprotected"]).not_to be_protected @@ -1953,8 +2029,8 @@ def containers it "seals all term with @protected true in context" do ctx = context.parse({ "@protected" => true, - "protected" => {"@id" => "http://example.com/protected"}, - "protected2" => {"@id" => "http://example.com/protected2"}, + "protected" => { "@id" => "http://example.com/protected" }, + "protected2" => { "@id" => "http://example.com/protected2" } }) expect(ctx.term_definitions["protected"]).to be_protected expect(ctx.term_definitions["protected2"]).to be_protected @@ -1963,8 +2039,8 @@ def containers it "does not seal term with @protected: false when context is protected" do ctx = context.parse({ "@protected" => true, - "protected" => {"@id" => "http://example.com/protected"}, - "unprotected" => {"@id" => "http://example.com/unprotected", "@protected" => false}, + "protected" => { "@id" => "http://example.com/protected" }, + "unprotected" => { "@id" => "http://example.com/unprotected", "@protected" => false } }) expect(ctx.term_definitions["protected"]).to be_protected expect(ctx.term_definitions["unprotected"]).not_to be_protected @@ -1972,85 +2048,96 @@ def containers it "does not error when redefining an identical term" do c = { - "protected" => {"@id" => "http://example.com/protected", "@protected" => true} + "protected" => { "@id" => "http://example.com/protected", "@protected" => true } } ctx = context.parse(c) - expect {ctx.parse(c)}.not_to raise_error + expect { ctx.parse(c) }.not_to raise_error end it "errors when redefining a protected term" do ctx = context.parse({ - "protected" => {"@id" => "http://example.com/protected", "@protected" => true} + "protected" => { "@id" => "http://example.com/protected", "@protected" => true } }) - expect {ctx.parse({"protected" => "http://example.com/different"})}.to raise_error(JSON::LD::JsonLdError::ProtectedTermRedefinition) + 
expect do + ctx.parse({ "protected" => "http://example.com/different" }) + end.to raise_error(JSON::LD::JsonLdError::ProtectedTermRedefinition) end it "errors when clearing a context having protected terms" do ctx = context.parse({ - "protected" => {"@id" => "http://example.com/protected", "@protected" => true} + "protected" => { "@id" => "http://example.com/protected", "@protected" => true } }) - expect {ctx.parse(nil)}.to raise_error(JSON::LD::JsonLdError::InvalidContextNullification) + expect { ctx.parse(nil) }.to raise_error(JSON::LD::JsonLdError::InvalidContextNullification) end end describe JSON::LD::Context::TermDefinition do context "with nothing" do - subject {described_class.new("term")} - its(:term) {is_expected.to eq "term"} - its(:id) {is_expected.to be_nil} - its(:to_rb) {is_expected.to eq %(TermDefinition.new("term"))} + subject { described_class.new("term") } + + its(:term) { is_expected.to eq "term" } + its(:id) { is_expected.to be_nil } + its(:to_rb) { is_expected.to eq %(TermDefinition.new("term")) } end context "with id" do - subject {described_class.new("term", id: "http://example.org/term")} - its(:term) {is_expected.to eq "term"} - its(:id) {is_expected.to eq "http://example.org/term"} - its(:to_rb) {is_expected.to eq %(TermDefinition.new("term", id: "http://example.org/term"))} + subject { described_class.new("term", id: "http://example.org/term") } + + its(:term) { is_expected.to eq "term" } + its(:id) { is_expected.to eq "http://example.org/term" } + its(:to_rb) { is_expected.to eq %(TermDefinition.new("term", id: "http://example.org/term")) } end context "with type_mapping" do - subject {described_class.new("term", type_mapping: "http://example.org/type")} - its(:type_mapping) {is_expected.to eq "http://example.org/type"} - its(:to_rb) {is_expected.to eq %(TermDefinition.new("term", type_mapping: "http://example.org/type"))} + subject { described_class.new("term", type_mapping: "http://example.org/type") } + + its(:type_mapping) { 
is_expected.to eq "http://example.org/type" } + its(:to_rb) { is_expected.to eq %(TermDefinition.new("term", type_mapping: "http://example.org/type")) } end context "with container_mapping @set" do - subject {described_class.new("term", container_mapping: "@set")} - its(:container_mapping) {is_expected.to be_empty} - its(:to_rb) {is_expected.to eq %(TermDefinition.new("term", container_mapping: "@set"))} + subject { described_class.new("term", container_mapping: "@set") } + + its(:container_mapping) { is_expected.to be_empty } + its(:to_rb) { is_expected.to eq %(TermDefinition.new("term", container_mapping: "@set")) } end context "with container_mapping @id @set" do - subject {described_class.new("term", container_mapping: %w(@id @set))} - its(:container_mapping) {is_expected.to eq Set['@id']} - its(:to_rb) {is_expected.to eq %(TermDefinition.new("term", container_mapping: ["@id", "@set"]))} + subject { described_class.new("term", container_mapping: %w[@id @set]) } + + its(:container_mapping) { is_expected.to eq Set['@id'] } + its(:to_rb) { is_expected.to eq %(TermDefinition.new("term", container_mapping: ["@id", "@set"])) } end context "with container_mapping @list" do - subject {described_class.new("term", container_mapping: "@list")} - its(:container_mapping) {is_expected.to eq Set['@list']} - its(:to_rb) {is_expected.to eq %(TermDefinition.new("term", container_mapping: "@list"))} + subject { described_class.new("term", container_mapping: "@list") } + + its(:container_mapping) { is_expected.to eq Set['@list'] } + its(:to_rb) { is_expected.to eq %(TermDefinition.new("term", container_mapping: "@list")) } end context "with language_mapping" do - subject {described_class.new("term", language_mapping: "en")} - its(:language_mapping) {is_expected.to eq "en"} - its(:to_rb) {is_expected.to eq %(TermDefinition.new("term", language_mapping: "en"))} + subject { described_class.new("term", language_mapping: "en") } + + its(:language_mapping) { is_expected.to eq "en" } + 
its(:to_rb) { is_expected.to eq %(TermDefinition.new("term", language_mapping: "en")) } end context "with reverse_property" do - subject {described_class.new("term", reverse_property: true)} - its(:reverse_property) {is_expected.to be_truthy} - its(:to_rb) {is_expected.to eq %(TermDefinition.new("term", reverse_property: true))} + subject { described_class.new("term", reverse_property: true) } + + its(:reverse_property) { is_expected.to be_truthy } + its(:to_rb) { is_expected.to eq %(TermDefinition.new("term", reverse_property: true)) } end context "with simple" do - subject {described_class.new("term", simple: true)} - its(:simple) {is_expected.to be_truthy} - its(:to_rb) {is_expected.to eq %(TermDefinition.new("term", simple: true))} + subject { described_class.new("term", simple: true) } + + its(:simple) { is_expected.to be_truthy } + its(:to_rb) { is_expected.to eq %(TermDefinition.new("term", simple: true)) } end end end diff --git a/spec/expand_spec.rb b/spec/expand_spec.rb index ccd4473e..d9f2b785 100644 --- a/spec/expand_spec.rb +++ b/spec/expand_spec.rb @@ -1,16 +1,17 @@ -# coding: utf-8 +# frozen_string_literal: true + require_relative 'spec_helper' describe JSON::LD::API do - let(:logger) {RDF::Spec.logger} + let(:logger) { RDF::Spec.logger } describe ".expand" do { - "empty doc": { + 'empty doc': { input: {}, output: [] }, - "@list coercion": { + '@list coercion': { input: %({ "@context": { "foo": {"@id": "http://example.com/foo", "@container": "@list"} @@ -21,7 +22,7 @@ "http://example.com/foo": [{"@list": [{"@value": "bar"}]}] }]) }, - "native values in list": { + 'native values in list': { input: %({ "http://example.com/foo": {"@list": [1, 2]} }), @@ -29,7 +30,7 @@ "http://example.com/foo": [{"@list": [{"@value": 1}, {"@value": 2}]}] }]) }, - "@graph": { + '@graph': { input: %({ "@context": {"ex": "http://example.com/"}, "@graph": [ @@ -42,7 +43,7 @@ {"http://example.com/bar": [{"@value": "bar"}]} ]) }, - "@graph value (expands to array form)": { + 
'@graph value (expands to array form)': { input: %({ "@context": {"ex": "http://example.com/"}, "ex:p": { @@ -61,7 +62,7 @@ }] }]) }, - "@type with CURIE": { + '@type with CURIE': { input: %({ "@context": {"ex": "http://example.com/"}, "@type": "ex:type" @@ -70,7 +71,7 @@ {"@type": ["http://example.com/type"]} ]) }, - "@type with CURIE and muliple values": { + '@type with CURIE and muliple values': { input: %({ "@context": {"ex": "http://example.com/"}, "@type": ["ex:type1", "ex:type2"] @@ -79,11 +80,11 @@ {"@type": ["http://example.com/type1", "http://example.com/type2"]} ]) }, - "@value with false": { + '@value with false': { input: %({"http://example.com/ex": {"@value": false}}), output: %([{"http://example.com/ex": [{"@value": false}]}]) }, - "compact IRI": { + 'compact IRI': { input: %({ "@context": {"ex": "http://example.com/"}, "ex:p": {"@id": "ex:Sub1"} @@ -91,14 +92,14 @@ output: %([{ "http://example.com/p": [{"@id": "http://example.com/Sub1"}] }]) - }, + } }.each_pair do |title, params| - it(title) {run_expand params} + it(title) { run_expand params } end context "default language" do { - "base": { + base: { input: %({ "http://example/foo": "bar" }), @@ -107,7 +108,7 @@ }]), language: "en" }, - "override": { + override: { input: %({ "@context": {"@language": null}, "http://example/foo": "bar" @@ -118,13 +119,13 @@ language: "en" } }.each_pair do |title, params| - it(title) {run_expand params} + it(title) { run_expand params } end end context "with relative IRIs" do { - "base": { + base: { input: %({ "@id": "", "@type": "http://www.w3.org/2000/01/rdf-schema#Resource" @@ -134,7 +135,7 @@ "@type": ["http://www.w3.org/2000/01/rdf-schema#Resource"] }]) }, - "relative": { + relative: { input: %({ "@id": "a/b", "@type": "http://www.w3.org/2000/01/rdf-schema#Resource" @@ -144,7 +145,7 @@ "@type": ["http://www.w3.org/2000/01/rdf-schema#Resource"] }]) }, - "hash": { + hash: { input: %({ "@id": "#a", "@type": "http://www.w3.org/2000/01/rdf-schema#Resource" @@ -154,7 
+155,7 @@ "@type": ["http://www.w3.org/2000/01/rdf-schema#Resource"] }]) }, - "unmapped @id": { + 'unmapped @id': { input: %({ "http://example.com/foo": {"@id": "bar"} }), @@ -162,7 +163,7 @@ "http://example.com/foo": [{"@id": "http://example.org/bar"}] }]) }, - "json-ld-syntax#66": { + 'json-ld-syntax#66': { input: %({ "@context": { "@base": "https://ex.org/", @@ -178,13 +179,13 @@ }]) } }.each do |title, params| - it(title) {run_expand params.merge(base: "http://example.org/")} + it(title) { run_expand params.merge(base: "http://example.org/") } end end context "with relative property IRIs" do { - "base": { + base: { input: %({ "http://a/b": "foo" }), @@ -192,31 +193,31 @@ "http://a/b": [{"@value": "foo"}] }]) }, - "relative": { + relative: { input: %({ "a/b": "foo" }), output: %([]) }, - "hash": { + hash: { input: %({ "#a": "foo" }), output: %([]) }, - "dotseg": { + dotseg: { input: %({ "../a": "foo" }), output: %([]) - }, + } }.each do |title, params| - it(title) {run_expand params.merge(base: "http://example.org/")} + it(title) { run_expand params.merge(base: "http://example.org/") } end context "with @vocab" do { - "base": { + base: { input: %({ "@context": {"@vocab": "http://vocab/"}, "http://a/b": "foo" @@ -225,7 +226,7 @@ "http://a/b": [{"@value": "foo"}] }]) }, - "relative": { + relative: { input: %({ "@context": {"@vocab": "http://vocab/"}, "a/b": "foo" @@ -234,7 +235,7 @@ "http://vocab/a/b": [{"@value": "foo"}] }]) }, - "hash": { + hash: { input: %({ "@context": {"@vocab": "http://vocab/"}, "#a": "foo" @@ -243,7 +244,7 @@ "http://vocab/#a": [{"@value": "foo"}] }]) }, - "dotseg": { + dotseg: { input: %({ "@context": {"@vocab": "http://vocab/"}, "../a": "foo" @@ -251,15 +252,15 @@ output: %([{ "http://vocab/../a": [{"@value": "foo"}] }]) - }, + } }.each do |title, params| - it(title) {run_expand params.merge(base: "http://example.org/")} + it(title) { run_expand params.merge(base: "http://example.org/") } end end context "with @vocab: ''" do { - "base": { 
+ base: { input: %({ "@context": {"@vocab": ""}, "http://a/b": "foo" @@ -268,7 +269,7 @@ "http://a/b": [{"@value": "foo"}] }]) }, - "relative": { + relative: { input: %({ "@context": {"@vocab": ""}, "a/b": "foo" @@ -277,7 +278,7 @@ "http://example.org/a/b": [{"@value": "foo"}] }]) }, - "hash": { + hash: { input: %({ "@context": {"@vocab": ""}, "#a": "foo" @@ -286,7 +287,7 @@ "http://example.org/#a": [{"@value": "foo"}] }]) }, - "dotseg": { + dotseg: { input: %({ "@context": {"@vocab": ""}, "../a": "foo" @@ -295,7 +296,7 @@ "http://example.org/../a": [{"@value": "foo"}] }]) }, - "example": { + example: { input: %({ "@context": { "@base": "http://example/document", @@ -312,13 +313,13 @@ }]) } }.each do |title, params| - it(title) {run_expand params.merge(base: "http://example.org/")} + it(title) { run_expand params.merge(base: "http://example.org/") } end end context "with @vocab: '/relative#'" do { - "base": { + base: { input: %({ "@context": {"@vocab": "/relative#"}, "http://a/b": "foo" @@ -327,7 +328,7 @@ "http://a/b": [{"@value": "foo"}] }]) }, - "relative": { + relative: { input: %({ "@context": {"@vocab": "/relative#"}, "a/b": "foo" @@ -336,7 +337,7 @@ "http://example.org/relative#a/b": [{"@value": "foo"}] }]) }, - "hash": { + hash: { input: %({ "@context": {"@vocab": "/relative#"}, "#a": "foo" @@ -345,7 +346,7 @@ "http://example.org/relative##a": [{"@value": "foo"}] }]) }, - "dotseg": { + dotseg: { input: %({ "@context": {"@vocab": "/relative#"}, "../a": "foo" @@ -354,7 +355,7 @@ "http://example.org/relative#../a": [{"@value": "foo"}] }]) }, - "example": { + example: { input: %({ "@context": { "@base": "http://example/document", @@ -371,14 +372,14 @@ }]) } }.each do |title, params| - it(title) {run_expand params.merge(base: "http://example.org/")} + it(title) { run_expand params.merge(base: "http://example.org/") } end end end context "keyword aliasing" do { - "@id": { + '@id': { input: %({ "@context": {"id": "@id"}, "id": "", @@ -389,7 +390,7 @@ "@type":[ 
"http://www.w3.org/2000/01/rdf-schema#Resource"] }]) }, - "@type": { + '@type': { input: %({ "@context": {"type": "@type"}, "type": "http://www.w3.org/2000/01/rdf-schema#Resource", @@ -400,7 +401,7 @@ "http://example.com/foo": [{"@value": "bar", "@type": "http://example.com/baz"}] }]) }, - "@language": { + '@language': { input: %({ "@context": {"language": "@language"}, "http://example.com/foo": {"@value": "bar", "language": "baz"} @@ -409,7 +410,7 @@ "http://example.com/foo": [{"@value": "bar", "@language": "baz"}] }]) }, - "@value": { + '@value': { input: %({ "@context": {"literal": "@value"}, "http://example.com/foo": {"literal": "bar"} @@ -418,7 +419,7 @@ "http://example.com/foo": [{"@value": "bar"}] }]) }, - "@list": { + '@list': { input: %({ "@context": {"list": "@list"}, "http://example.com/foo": {"list": ["bar"]} @@ -426,15 +427,15 @@ output: %([{ "http://example.com/foo": [{"@list": [{"@value": "bar"}]}] }]) - }, + } }.each do |title, params| - it(title) {run_expand params} + it(title) { run_expand params } end end context "native types" do { - "true": { + true => { input: %({ "@context": {"e": "http://example.org/vocab#"}, "e:bool": true @@ -443,7 +444,7 @@ "http://example.org/vocab#bool": [{"@value": true}] }]) }, - "false": { + false => { input: %({ "@context": {"e": "http://example.org/vocab#"}, "e:bool": false @@ -452,7 +453,7 @@ "http://example.org/vocab#bool": [{"@value": false}] }]) }, - "double": { + double: { input: %({ "@context": {"e": "http://example.org/vocab#"}, "e:double": 1.23 @@ -461,7 +462,7 @@ "http://example.org/vocab#double": [{"@value": 1.23}] }]) }, - "double-zero": { + 'double-zero': { input: %({ "@context": {"e": "http://example.org/vocab#"}, "e:double-zero": 0.0e0 @@ -470,7 +471,7 @@ "http://example.org/vocab#double-zero": [{"@value": 0.0e0}] }]) }, - "integer": { + integer: { input: %({ "@context": {"e": "http://example.org/vocab#"}, "e:integer": 123 @@ -478,23 +479,23 @@ output: %([{ "http://example.org/vocab#integer": 
[{"@value": 123}] }]) - }, + } }.each do |title, params| - it(title) {run_expand params} + it(title) { run_expand params } end context "with @type: @none" do { - "true": { + true => { input: %({ "@context": {"e": {"@id": "http://example.org/vocab#bool", "@type": "@none"}}, "e": true }), - output:%( [{ + output: %( [{ "http://example.org/vocab#bool": [{"@value": true}] }]) }, - "false": { + false => { input: %({ "@context": {"e": {"@id": "http://example.org/vocab#bool", "@type": "@none"}}, "e": false @@ -503,7 +504,7 @@ "http://example.org/vocab#bool": [{"@value": false}] }]) }, - "double": { + double: { input: %({ "@context": {"e": {"@id": "http://example.org/vocab#double", "@type": "@none"}}, "e": 1.23 @@ -512,7 +513,7 @@ "http://example.org/vocab#double": [{"@value": 1.23}] }]) }, - "double-zero": { + 'double-zero': { input: %({ "@context": {"e": {"@id": "http://example.org/vocab#double", "@type": "@none"}}, "e": 0.0e0 @@ -521,7 +522,7 @@ "http://example.org/vocab#double": [{"@value": 0.0e0}] }]) }, - "integer": { + integer: { input: %({ "@context": {"e": {"@id": "http://example.org/vocab#integer", "@type": "@none"}}, "e": 123 @@ -529,24 +530,24 @@ output: %([{ "http://example.org/vocab#integer": [{"@value": 123}] }]) - }, + } }.each do |title, params| - it(title) {run_expand(processingMode: "json-ld-1.1", **params)} + it(title) { run_expand(processingMode: "json-ld-1.1", **params) } end end context "with @type: @id" do { - "true": { + true => { input: %({ "@context": {"e": {"@id": "http://example.org/vocab#bool", "@type": "@id"}}, "e": true }), - output:%( [{ + output: %( [{ "http://example.org/vocab#bool": [{"@value": true}] }]) }, - "false": { + false => { input: %({ "@context": {"e": {"@id": "http://example.org/vocab#bool", "@type": "@id"}}, "e": false @@ -555,7 +556,7 @@ "http://example.org/vocab#bool": [{"@value": false}] }]) }, - "double": { + double: { input: %({ "@context": {"e": {"@id": "http://example.org/vocab#double", "@type": "@id"}}, "e": 1.23 @@ 
-564,7 +565,7 @@ "http://example.org/vocab#double": [{"@value": 1.23}] }]) }, - "double-zero": { + 'double-zero': { input: %({ "@context": {"e": {"@id": "http://example.org/vocab#double", "@type": "@id"}}, "e": 0.0e0 @@ -573,7 +574,7 @@ "http://example.org/vocab#double": [{"@value": 0.0e0}] }]) }, - "integer": { + integer: { input: %({ "@context": {"e": {"@id": "http://example.org/vocab#integer", "@type": "@id"}}, "e": 123 @@ -581,24 +582,24 @@ output: %([{ "http://example.org/vocab#integer": [{"@value": 123}] }]) - }, + } }.each do |title, params| - it(title) {run_expand params} + it(title) { run_expand params } end end context "with @type: @vocab" do { - "true": { + true => { input: %({ "@context": {"e": {"@id": "http://example.org/vocab#bool", "@type": "@vocab"}}, "e": true }), - output:%( [{ + output: %( [{ "http://example.org/vocab#bool": [{"@value": true}] }]) }, - "false": { + false => { input: %({ "@context": {"e": {"@id": "http://example.org/vocab#bool", "@type": "@vocab"}}, "e": false @@ -607,7 +608,7 @@ "http://example.org/vocab#bool": [{"@value": false}] }]) }, - "double": { + double: { input: %({ "@context": {"e": {"@id": "http://example.org/vocab#double", "@type": "@vocab"}}, "e": 1.23 @@ -616,7 +617,7 @@ "http://example.org/vocab#double": [{"@value": 1.23}] }]) }, - "double-zero": { + 'double-zero': { input: %({ "@context": {"e": {"@id": "http://example.org/vocab#double", "@type": "@vocab"}}, "e": 0.0e0 @@ -625,7 +626,7 @@ "http://example.org/vocab#double": [{"@value": 0.0e0}] }]) }, - "integer": { + integer: { input: %({ "@context": {"e": {"@id": "http://example.org/vocab#integer", "@type": "@vocab"}}, "e": 123 @@ -633,16 +634,16 @@ output: %([{ "http://example.org/vocab#integer": [{"@value": 123}] }]) - }, + } }.each do |title, params| - it(title) {run_expand params} + it(title) { run_expand params } end end end context "with @type: @json" do { - "true": { + true => { input: %({ "@context": { "@version": 1.1, @@ -650,11 +651,11 @@ }, "e": true }), 
- output:%( [{ + output: %( [{ "http://example.org/vocab#bool": [{"@value": true, "@type": "@json"}] }]) }, - "false": { + false => { input: %({ "@context": { "@version": 1.1, @@ -666,7 +667,7 @@ "http://example.org/vocab#bool": [{"@value": false, "@type": "@json"}] }]) }, - "double": { + double: { input: %({ "@context": { "@version": 1.1, @@ -678,7 +679,7 @@ "http://example.org/vocab#double": [{"@value": 1.23, "@type": "@json"}] }]) }, - "double-zero": { + 'double-zero': { input: %({ "@context": { "@version": 1.1, @@ -690,7 +691,7 @@ "http://example.org/vocab#double": [{"@value": 0.0e0, "@type": "@json"}] }]) }, - "integer": { + integer: { input: %({ "@context": { "@version": 1.1, @@ -702,7 +703,7 @@ "http://example.org/vocab#integer": [{"@value": 123, "@type": "@json"}] }]) }, - "string": { + string: { input: %({ "@context": { "@version": 1.1, @@ -717,7 +718,7 @@ }] }]) }, - "null": { + null: { input: %({ "@context": { "@version": 1.1, @@ -732,7 +733,7 @@ }] }]) }, - "object": { + object: { input: %({ "@context": { "@version": 1.1, @@ -744,7 +745,7 @@ "http://example.org/vocab#object": [{"@value": {"foo": "bar"}, "@type": "@json"}] }]) }, - "array": { + array: { input: %({ "@context": { "@version": 1.1, @@ -756,7 +757,7 @@ "http://example.org/vocab#array": [{"@value": [{"foo": "bar"}], "@type": "@json"}] }]) }, - "Does not expand terms inside json": { + 'Does not expand terms inside json': { input: %({ "@context": { "@version": 1.1, @@ -768,7 +769,7 @@ "http://example.org/vocab#array": [{"@value": [{"e": "bar"}], "@type": "@json"}] }]) }, - "Already expanded object": { + 'Already expanded object': { input: %({ "http://example.org/vocab#object": [{"@value": {"foo": "bar"}, "@type": "@json"}] }), @@ -777,7 +778,7 @@ }]), processingMode: 'json-ld-1.1' }, - "Already expanded object with aliased keys": { + 'Already expanded object with aliased keys': { input: %({ "@context": {"@version": 1.1, "value": "@value", "type": "@type", "json": "@json"}, 
"http://example.org/vocab#object": [{"value": {"foo": "bar"}, "type": "json"}] @@ -785,15 +786,15 @@ output: %([{ "http://example.org/vocab#object": [{"@value": {"foo": "bar"}, "@type": "@json"}] }]) - }, + } }.each do |title, params| - it(title) {run_expand params} + it(title) { run_expand params } end end context "coerced typed values" do { - "boolean": { + boolean: { input: %({ "@context": {"foo": {"@id": "http://example.org/foo", "@type": "http://www.w3.org/2001/XMLSchema#boolean"}}, "foo": "true" @@ -802,7 +803,7 @@ "http://example.org/foo": [{"@value": "true", "@type": "http://www.w3.org/2001/XMLSchema#boolean"}] }]) }, - "date": { + date: { input: %({ "@context": {"foo": {"@id": "http://example.org/foo", "@type": "http://www.w3.org/2001/XMLSchema#date"}}, "foo": "2011-03-26" @@ -810,35 +811,35 @@ output: %([{ "http://example.org/foo": [{"@value": "2011-03-26", "@type": "http://www.w3.org/2001/XMLSchema#date"}] }]) - }, + } }.each do |title, params| - it(title) {run_expand params} + it(title) { run_expand params } end end context "null" do { - "value": { + value: { input: %({"http://example.com/foo": null}), output: [] }, - "@value": { + '@value': { input: %({"http://example.com/foo": {"@value": null}}), output: [] }, - "@value and non-null @type": { + '@value and non-null @type': { input: %({"http://example.com/foo": {"@value": null, "@type": "http://type"}}), output: [] }, - "@value and non-null @language": { + '@value and non-null @language': { input: %({"http://example.com/foo": {"@value": null, "@language": "en"}}), output: [] }, - "array with null elements": { + 'array with null elements': { input: %({"http://example.com/foo": [null]}), output: %([{"http://example.com/foo": []}]) }, - "@set with null @value": { + '@set with null @value': { input: %({ "http://example.com/foo": [ {"@value": null, "@type": "http://example.org/Type"} @@ -849,13 +850,13 @@ }]) } }.each do |title, params| - it(title) {run_expand params} + it(title) { run_expand params } end 
end context "@direction" do { - "value with coerced null direction": { + 'value with coerced null direction': { input: %({ "@context": { "@direction": "rtl", @@ -876,13 +877,13 @@ ]) } }.each_pair do |title, params| - it(title) {run_expand params} + it(title) { run_expand params } end end context "default language" do { - "value with coerced null language": { + 'value with coerced null language': { input: %({ "@context": { "@language": "en", @@ -899,14 +900,14 @@ "http://example.org/vocab#nolang": [{"@value": "no language"}] } ]) - }, + } }.each do |title, params| - it(title) {run_expand params} + it(title) { run_expand params } end context "and default direction" do { - "value with coerced null direction": { + 'value with coerced null direction': { input: %({ "@context": { "@language": "en", @@ -946,14 +947,14 @@ ]) } }.each_pair do |title, params| - it(title) {run_expand params} + it(title) { run_expand params } end end end context "default vocabulary" do { - "property": { + property: { input: %({ "@context": {"@vocab": "http://example.com/"}, "verb": {"@value": "foo"} @@ -962,7 +963,7 @@ "http://example.com/verb": [{"@value": "foo"}] }]) }, - "datatype": { + datatype: { input: %({ "@context": {"@vocab": "http://example.com/"}, "http://example.org/verb": {"@value": "foo", "@type": "string"} @@ -971,7 +972,7 @@ "http://example.org/verb": [{"@value": "foo", "@type": "http://example.com/string"}] }]) }, - "expand-0028": { + 'expand-0028': { input: %({ "@context": { "@vocab": "http://example.org/vocab#", @@ -1010,17 +1011,17 @@ ]) } }.each do |title, params| - it(title) {run_expand params.merge(base: "http://foo/bar/")} + it(title) { run_expand params.merge(base: "http://foo/bar/") } end end context "unmapped properties" do { - "unmapped key": { + 'unmapped key': { input: %({"foo": "bar"}), output: [] }, - "unmapped @type as datatype": { + 'unmapped @type as datatype': { input: %({ "http://example.com/foo": {"@value": "bar", "@type": "baz"} }), @@ -1028,11 +1029,11 
@@ "http://example.com/foo": [{"@value": "bar", "@type": "http://example/baz"}] }]) }, - "unknown keyword": { + 'unknown keyword': { input: %({"@foo": "bar"}), output: [] }, - "value": { + value: { input: %({ "@context": {"ex": {"@id": "http://example.org/idrange", "@type": "@id"}}, "@id": "http://example.org/Subj", @@ -1040,7 +1041,7 @@ }), output: [] }, - "context reset": { + 'context reset': { input: %({ "@context": {"ex": "http://example.org/", "prop": "ex:prop"}, "@id": "http://example.org/id1", @@ -1058,13 +1059,13 @@ }]) } }.each do |title, params| - it(title) {run_expand params.merge(base: "http://example/")} + it(title) { run_expand params.merge(base: "http://example/") } end end context "@container: @index" do { - "string annotation": { + 'string annotation': { input: %({ "@context": { "container": { @@ -1086,14 +1087,14 @@ {"@value": "The Queen", "@index": "en"} ] }]) - }, + } }.each do |title, params| - it(title) {run_expand params} + it(title) { run_expand params } end context "@index: property" do { - "error if @version is json-ld-1.0": { + 'error if @version is json-ld-1.0': { input: %({ "@context": { "@vocab": "http://example.com/", @@ -1108,7 +1109,7 @@ exception: JSON::LD::JsonLdError::InvalidTermDefinition, processingMode: 'json-ld-1.0' }, - "error if @container does not include @index": { + 'error if @container does not include @index': { input: %({ "@context": { "@version": 1.1, @@ -1123,7 +1124,7 @@ }), exception: JSON::LD::JsonLdError::InvalidTermDefinition }, - "error if @index is a keyword": { + 'error if @index is a keyword': { input: %({ "@context": { "@version": 1.1, @@ -1142,7 +1143,7 @@ }), exception: JSON::LD::JsonLdError::InvalidTermDefinition }, - "error if @index is not a string": { + 'error if @index is not a string': { input: %({ "@context": { "@version": 1.1, @@ -1161,7 +1162,7 @@ }), exception: JSON::LD::JsonLdError::InvalidTermDefinition }, - "error if attempting to add property to value object": { + 'error if attempting to 
add property to value object': { input: %({ "@context": { "@version": 1.1, @@ -1180,7 +1181,7 @@ }), exception: JSON::LD::JsonLdError::InvalidValueObject }, - "property-valued index expands to property value, instead of @index (value)": { + 'property-valued index expands to property value, instead of @index (value)': { input: %({ "@context": { "@version": 1.1, @@ -1203,7 +1204,7 @@ ] }]) }, - "property-valued index appends to property value, instead of @index (value)": { + 'property-valued index appends to property value, instead of @index (value)': { input: %({ "@context": { "@version": 1.1, @@ -1229,7 +1230,7 @@ ] }]) }, - "property-valued index expands to property value, instead of @index (node)": { + 'property-valued index expands to property value, instead of @index (node)': { input: %({ "@context": { "@version": 1.1, @@ -1253,7 +1254,7 @@ ] }]) }, - "property-valued index appends to property value, instead of @index (node)": { + 'property-valued index appends to property value, instead of @index (node)': { input: %({ "@context": { "@version": 1.1, @@ -1280,7 +1281,7 @@ ] }]) }, - "property-valued index does not output property for @none": { + 'property-valued index does not output property for @none': { input: %({ "@context": { "@version": 1.1, @@ -1306,34 +1307,34 @@ {"@id": "http://example.com/person/3", "http://example.com/prop": [{"@id": "http://example.com/guest"}]} ] }]) - }, + } }.each do |title, params| - it(title) {run_expand(validate: true, **params)} + it(title) { run_expand(validate: true, **params) } end end end context "@container: @list" do { - "empty": { + empty: { input: %({"http://example.com/foo": {"@list": []}}), output: %([{"http://example.com/foo": [{"@list": []}]}]) }, - "coerced empty": { + 'coerced empty': { input: %({ "@context": {"http://example.com/foo": {"@container": "@list"}}, "http://example.com/foo": [] }), output: %([{"http://example.com/foo": [{"@list": []}]}]) }, - "coerced single element": { + 'coerced single element': { 
input: %({ "@context": {"http://example.com/foo": {"@container": "@list"}}, "http://example.com/foo": [ "foo" ] }), output: %([{"http://example.com/foo": [{"@list": [{"@value": "foo"}]}]}]) }, - "coerced multiple elements": { + 'coerced multiple elements': { input: %({ "@context": {"http://example.com/foo": {"@container": "@list"}}, "http://example.com/foo": [ "foo", "bar" ] @@ -1342,7 +1343,7 @@ "http://example.com/foo": [{"@list": [ {"@value": "foo"}, {"@value": "bar"} ]}] }]) }, - "native values in list": { + 'native values in list': { input: %({ "http://example.com/foo": {"@list": [1, 2]} }), @@ -1350,7 +1351,7 @@ "http://example.com/foo": [{"@list": [{"@value": 1}, {"@value": 2}]}] }]) }, - "explicit list with coerced @id values": { + 'explicit list with coerced @id values': { input: %({ "@context": {"http://example.com/foo": {"@type": "@id"}}, "http://example.com/foo": {"@list": ["http://foo", "http://bar"]} @@ -1359,7 +1360,7 @@ "http://example.com/foo": [{"@list": [{"@id": "http://foo"}, {"@id": "http://bar"}]}] }]) }, - "explicit list with coerced datatype values": { + 'explicit list with coerced datatype values': { input: %({ "@context": {"http://example.com/foo": {"@type": "http://www.w3.org/2001/XMLSchema#date"}}, "http://example.com/foo": {"@list": ["2012-04-12"]} @@ -1368,7 +1369,7 @@ "http://example.com/foo": [{"@list": [{"@value": "2012-04-12", "@type": "http://www.w3.org/2001/XMLSchema#date"}]}] }]) }, - "expand-0004": { + 'expand-0004': { input: %({ "@context": { "mylist1": {"@id": "http://example.com/mylist1", "@container": "@list"}, @@ -1392,7 +1393,7 @@ } ]) }, - "@list containing @list": { + '@list containing @list': { input: %({ "http://example.com/foo": {"@list": [{"@list": ["baz"]}]} }), @@ -1400,7 +1401,7 @@ "http://example.com/foo": [{"@list": [{"@list": [{"@value": "baz"}]}]}] }]) }, - "@list containing empty @list": { + '@list containing empty @list': { input: %({ "http://example.com/foo": {"@list": [{"@list": []}]} }), @@ -1408,7 
+1409,7 @@ "http://example.com/foo": [{"@list": [{"@list": []}]}] }]) }, - "@list containing @list (with coercion)": { + '@list containing @list (with coercion)': { input: %({ "@context": {"foo": {"@id": "http://example.com/foo", "@container": "@list"}}, "foo": [{"@list": ["baz"]}] @@ -1417,7 +1418,7 @@ "http://example.com/foo": [{"@list": [{"@list": [{"@value": "baz"}]}]}] }]) }, - "@list containing empty @list (with coercion)": { + '@list containing empty @list (with coercion)': { input: %({ "@context": {"foo": {"@id": "http://example.com/foo", "@container": "@list"}}, "foo": [{"@list": []}] @@ -1426,7 +1427,7 @@ "http://example.com/foo": [{"@list": [{"@list": []}]}] }]) }, - "coerced @list containing an array": { + 'coerced @list containing an array': { input: %({ "@context": {"foo": {"@id": "http://example.com/foo", "@container": "@list"}}, "foo": [["baz"]] @@ -1435,7 +1436,7 @@ "http://example.com/foo": [{"@list": [{"@list": [{"@value": "baz"}]}]}] }]) }, - "coerced @list containing an empty array": { + 'coerced @list containing an empty array': { input: %({ "@context": {"foo": {"@id": "http://example.com/foo", "@container": "@list"}}, "foo": [[]] @@ -1444,7 +1445,7 @@ "http://example.com/foo": [{"@list": [{"@list": []}]}] }]) }, - "coerced @list containing deep arrays": { + 'coerced @list containing deep arrays': { input: %({ "@context": {"foo": {"@id": "http://example.com/foo", "@container": "@list"}}, "foo": [[["baz"]]] @@ -1453,16 +1454,16 @@ "http://example.com/foo": [{"@list": [{"@list": [{"@list": [{"@value": "baz"}]}]}]}] }]) }, - "coerced @list containing deep empty arrays": { + 'coerced @list containing deep empty arrays': { input: %({ "@context": {"foo": {"@id": "http://example.com/foo", "@container": "@list"}}, "foo": [[[]]] }), output: %([{ "http://example.com/foo": [{"@list": [{"@list": [{"@list": []}]}]}] - }]), + }]) }, - "coerced @list containing multiple lists": { + 'coerced @list containing multiple lists': { input: %({ "@context": {"foo": 
{"@id": "http://example.com/foo", "@container": "@list"}}, "foo": [["a"], ["b"]] @@ -1474,7 +1475,7 @@ ]}] }]) }, - "coerced @list containing mixed list values": { + 'coerced @list containing mixed list values': { input: %({ "@context": {"foo": {"@id": "http://example.com/foo", "@container": "@list"}}, "foo": [["a"], "b"] @@ -1485,19 +1486,19 @@ {"@value": "b"} ]}] }]) - }, + } }.each do |title, params| - it(title) {run_expand params} + it(title) { run_expand params } end end context "@container: @set" do { - "empty": { + empty: { input: %({"http://example.com/foo": {"@set": []}}), output: %([{"http://example.com/foo": []}]) }, - "coerced empty": { + 'coerced empty': { input: %({ "@context": {"http://example.com/foo": {"@container": "@set"}}, "http://example.com/foo": [] @@ -1506,7 +1507,7 @@ "http://example.com/foo": [] }]) }, - "coerced single element": { + 'coerced single element': { input: %({ "@context": {"http://example.com/foo": {"@container": "@set"}}, "http://example.com/foo": [ "foo" ] @@ -1515,7 +1516,7 @@ "http://example.com/foo": [ {"@value": "foo"} ] }]) }, - "coerced multiple elements": { + 'coerced multiple elements': { input: %({ "@context": {"http://example.com/foo": {"@container": "@set"}}, "http://example.com/foo": [ "foo", "bar" ] @@ -1524,7 +1525,7 @@ "http://example.com/foo": [ {"@value": "foo"}, {"@value": "bar"} ] }]) }, - "array containing set": { + 'array containing set': { input: %({ "http://example.com/foo": [{"@set": []}] }), @@ -1532,7 +1533,7 @@ "http://example.com/foo": [] }]) }, - "Free-floating values in sets": { + 'Free-floating values in sets': { input: %({ "@context": {"property": "http://example.com/property"}, "@graph": [{ @@ -1556,13 +1557,13 @@ }]) } }.each do |title, params| - it(title) {run_expand params} + it(title) { run_expand params } end end context "@container: @language" do { - "simple map": { + 'simple map': { input: %({ "@context": { "vocab": "http://example.com/vocab/", @@ -1588,7 +1589,7 @@ } ]) }, - "simple 
map with @none": { + 'simple map with @none': { input: %({ "@context": { "vocab": "http://example.com/vocab/", @@ -1616,7 +1617,7 @@ } ]) }, - "simple map with alias of @none": { + 'simple map with alias of @none': { input: %({ "@context": { "vocab": "http://example.com/vocab/", @@ -1645,7 +1646,7 @@ } ]) }, - "simple map with default direction": { + 'simple map with default direction': { input: %({ "@context": { "@direction": "ltr", @@ -1672,7 +1673,7 @@ } ]) }, - "simple map with term direction": { + 'simple map with term direction': { input: %({ "@context": { "vocab": "http://example.com/vocab/", @@ -1699,7 +1700,7 @@ } ]) }, - "simple map with overriding term direction": { + 'simple map with overriding term direction': { input: %({ "@context": { "vocab": "http://example.com/vocab/", @@ -1727,7 +1728,7 @@ } ]) }, - "simple map with overriding null direction": { + 'simple map with overriding null direction': { input: %({ "@context": { "vocab": "http://example.com/vocab/", @@ -1755,7 +1756,7 @@ } ]) }, - "expand-0035": { + 'expand-0035': { input: %({ "@context": { "@vocab": "http://example.com/vocab/", @@ -1788,13 +1789,13 @@ ]) } }.each do |title, params| - it(title) {run_expand params} + it(title) { run_expand params } end end context "@container: @id" do { - "Adds @id to object not having an @id": { + 'Adds @id to object not having an @id': { input: %({ "@context": { "@vocab": "http://example/", @@ -1812,7 +1813,7 @@ ] }]) }, - "Retains @id in object already having an @id": { + 'Retains @id in object already having an @id': { input: %({ "@context": { "@vocab": "http://example/", @@ -1830,7 +1831,7 @@ ] }]) }, - "Adds expanded @id to object": { + 'Adds expanded @id to object': { input: %({ "@context": { "@vocab": "http://example/", @@ -1847,7 +1848,7 @@ }]), base: "http://example.org/" }, - "Raises InvalidContainerMapping if processingMode is 1.0": { + 'Raises InvalidContainerMapping if processingMode is 1.0': { input: %({ "@context": { "@vocab": 
"http://example/", @@ -1861,7 +1862,7 @@ processingMode: 'json-ld-1.0', exception: JSON::LD::JsonLdError::InvalidContainerMapping }, - "Does not add @id if it is @none, or expands to @none": { + 'Does not add @id if it is @none, or expands to @none': { input: %({ "@context": { "@vocab": "http://example/", @@ -1881,13 +1882,13 @@ }]) } }.each do |title, params| - it(title) {run_expand({processingMode: "json-ld-1.1"}.merge(params))} + it(title) { run_expand({ processingMode: "json-ld-1.1" }.merge(params)) } end end context "@container: @type" do { - "Adds @type to object not having an @type": { + 'Adds @type to object not having an @type': { input: %({ "@context": { "@vocab": "http://example/", @@ -1905,7 +1906,7 @@ ] }]) }, - "Prepends @type in object already having an @type": { + 'Prepends @type in object already having an @type': { input: %({ "@context": { "@vocab": "http://example/", @@ -1929,7 +1930,7 @@ ] }]) }, - "Adds vocabulary expanded @type to object": { + 'Adds vocabulary expanded @type to object': { input: %({ "@context": { "@vocab": "http://example/", @@ -1945,7 +1946,7 @@ ] }]) }, - "Adds document expanded @type to object": { + 'Adds document expanded @type to object': { input: %({ "@context": { "@vocab": "http://example/", @@ -1962,7 +1963,7 @@ ] }]) }, - "Does not add @type if it is @none, or expands to @none": { + 'Does not add @type if it is @none, or expands to @none': { input: %({ "@context": { "@vocab": "http://example/", @@ -1981,7 +1982,7 @@ ] }]) }, - "Raises InvalidContainerMapping if processingMode is 1.0": { + 'Raises InvalidContainerMapping if processingMode is 1.0': { input: %({ "@context": { "@vocab": "http://example/", @@ -1994,15 +1995,15 @@ }), processingMode: 'json-ld-1.0', exception: JSON::LD::JsonLdError::InvalidContainerMapping - }, + } }.each do |title, params| - it(title) {run_expand({processingMode: "json-ld-1.1"}.merge(params))} + it(title) { run_expand({ processingMode: "json-ld-1.1" }.merge(params)) } end end context 
"@container: @graph" do { - "Creates a graph object given a value": { + 'Creates a graph object given a value': { input: %({ "@context": { "@vocab": "http://example.org/", @@ -2020,7 +2021,7 @@ }] }]) }, - "Creates a graph object within an array given a value": { + 'Creates a graph object within an array given a value': { input: %({ "@context": { "@vocab": "http://example.org/", @@ -2038,7 +2039,7 @@ }] }]) }, - "Creates an graph object if value is a graph": { + 'Creates an graph object if value is a graph': { input: %({ "@context": { "@vocab": "http://example.org/", @@ -2059,14 +2060,14 @@ }] }] }]) - }, + } }.each do |title, params| - it(title) {run_expand({processingMode: "json-ld-1.1"}.merge(params))} + it(title) { run_expand({ processingMode: "json-ld-1.1" }.merge(params)) } end context "+ @index" do { - "Creates a graph object given an indexed value": { + 'Creates a graph object given an indexed value': { input: %({ "@context": { "@vocab": "http://example.org/", @@ -2085,7 +2086,7 @@ }] }]) }, - "Creates a graph object given an indexed value with index @none": { + 'Creates a graph object given an indexed value with index @none': { input: %({ "@context": { "@vocab": "http://example.org/", @@ -2103,7 +2104,7 @@ }] }]) }, - "Creates a graph object given an indexed value with index alias of @none": { + 'Creates a graph object given an indexed value with index alias of @none': { input: %({ "@context": { "@vocab": "http://example.org/", @@ -2122,7 +2123,7 @@ }] }]) }, - "Creates a graph object given an indexed value with @set": { + 'Creates a graph object given an indexed value with @set': { input: %({ "@context": { "@vocab": "http://example.org/", @@ -2141,7 +2142,7 @@ }] }]) }, - "Does not create a new graph object if indexed value is already a graph object": { + 'Does not create a new graph object if indexed value is already a graph object': { input: %({ "@context": { "@vocab": "http://example.org/", @@ -2163,14 +2164,14 @@ }] }] }]) - }, + } }.each do |title, 
params| - it(title) {run_expand({processingMode: "json-ld-1.1"}.merge(params))} + it(title) { run_expand({ processingMode: "json-ld-1.1" }.merge(params)) } end context "@index: property" do { - "it expands to property value, instead of @index": { + 'it expands to property value, instead of @index': { input: %({ "@context": { "@version": 1.1, @@ -2189,16 +2190,16 @@ }] }] }]) - }, + } }.each do |title, params| - it(title) {run_expand(validate: true, **params)} + it(title) { run_expand(validate: true, **params) } end end end context "+ @id" do { - "Creates a graph object given an indexed value": { + 'Creates a graph object given an indexed value': { input: %({ "@context": { "@vocab": "http://example.org/", @@ -2217,7 +2218,7 @@ }] }]) }, - "Creates a graph object given an indexed value of @none": { + 'Creates a graph object given an indexed value of @none': { input: %({ "@context": { "@vocab": "http://example.org/", @@ -2235,7 +2236,7 @@ }] }]) }, - "Creates a graph object given an indexed value of alias of @none": { + 'Creates a graph object given an indexed value of alias of @none': { input: %({ "@context": { "@vocab": "http://example.org/", @@ -2254,7 +2255,7 @@ }] }]) }, - "Creates a graph object given an indexed value with @set": { + 'Creates a graph object given an indexed value with @set': { input: %({ "@context": { "@vocab": "http://example.org/", @@ -2273,7 +2274,7 @@ }] }]) }, - "Does not create a new graph object if indexed value is already a graph object": { + 'Does not create a new graph object if indexed value is already a graph object': { input: %({ "@context": { "@vocab": "http://example.org/", @@ -2295,16 +2296,16 @@ }] }] }]) - }, + } }.each do |title, params| - it(title) {run_expand({processingMode: "json-ld-1.1"}.merge(params))} + it(title) { run_expand({ processingMode: "json-ld-1.1" }.merge(params)) } end end end context "@included" do { - "Basic Included array": { + 'Basic Included array': { input: %({ "@context": { "@version": 1.1, @@ -2322,7 
+2323,7 @@ }] }]) }, - "Basic Included object": { + 'Basic Included object': { input: %({ "@context": { "@version": 1.1, @@ -2340,7 +2341,7 @@ }] }]) }, - "Multiple properties mapping to @included are folded together": { + 'Multiple properties mapping to @included are folded together': { input: %({ "@context": { "@version": 1.1, @@ -2358,7 +2359,7 @@ ] }]) }, - "Included containing @included": { + 'Included containing @included': { input: %({ "@context": { "@version": 1.1, @@ -2382,7 +2383,7 @@ }] }]) }, - "Property value with @included": { + 'Property value with @included': { input: %({ "@context": { "@version": 1.1, @@ -2404,7 +2405,7 @@ }] }]) }, - "json.api example": { + 'json.api example': { input: %({ "@context": { "@version": 1.1, @@ -2538,7 +2539,7 @@ }] }]) }, - "Error if @included value is a string": { + 'Error if @included value is a string': { input: %({ "@context": { "@version": 1.1, @@ -2548,7 +2549,7 @@ }), exception: JSON::LD::JsonLdError::InvalidIncludedValue }, - "Error if @included value is a value object": { + 'Error if @included value is a value object': { input: %({ "@context": { "@version": 1.1, @@ -2558,7 +2559,7 @@ }), exception: JSON::LD::JsonLdError::InvalidIncludedValue }, - "Error if @included value is a list object": { + 'Error if @included value is a list object': { input: %({ "@context": { "@version": 1.1, @@ -2567,15 +2568,15 @@ "@included": {"@list": ["value"]} }), exception: JSON::LD::JsonLdError::InvalidIncludedValue - }, + } }.each do |title, params| - it(title) {run_expand(params)} + it(title) { run_expand(params) } end end context "@nest" do { - "Expands input using @nest": { + 'Expands input using @nest': { input: %({ "@context": {"@vocab": "http://example.org/"}, "p1": "v1", @@ -2588,7 +2589,7 @@ "http://example.org/p2": [{"@value": "v2"}] }]) }, - "Expands input using aliased @nest": { + 'Expands input using aliased @nest': { input: %({ "@context": { "@vocab": "http://example.org/", @@ -2604,7 +2605,7 @@ 
"http://example.org/p2": [{"@value": "v2"}] }]) }, - "Appends nested values when property at base and nested": { + 'Appends nested values when property at base and nested': { input: %({ "@context": { "@vocab": "http://example.org/", @@ -2624,7 +2625,7 @@ ] }]) }, - "Appends nested values from all @nest aliases in term order": { + 'Appends nested values from all @nest aliases in term order': { input: %({ "@context": { "@vocab": "http://example.org/", @@ -2649,7 +2650,7 @@ ] }]) }, - "Nested nested containers": { + 'Nested nested containers': { input: %({ "@context": { "@vocab": "http://example.org/" @@ -2672,7 +2673,7 @@ ] }]) }, - "Arrays of nested values": { + 'Arrays of nested values': { input: %({ "@context": { "@vocab": "http://example.org/", @@ -2694,7 +2695,7 @@ ] }]) }, - "A nest of arrays": { + 'A nest of arrays': { input: %({ "@context": { "@vocab": "http://example.org/", @@ -2718,35 +2719,35 @@ ] }]) }, - "@nest MUST NOT have a string value": { + '@nest MUST NOT have a string value': { input: %({ "@context": {"@vocab": "http://example.org/"}, "@nest": "This should generate an error" }), exception: JSON::LD::JsonLdError::InvalidNestValue }, - "@nest MUST NOT have a boolen value": { + '@nest MUST NOT have a boolen value': { input: %({ "@context": {"@vocab": "http://example.org/"}, "@nest": true }), exception: JSON::LD::JsonLdError::InvalidNestValue }, - "@nest MUST NOT have a numeric value": { + '@nest MUST NOT have a numeric value': { input: %({ "@context": {"@vocab": "http://example.org/"}, "@nest": 1 }), exception: JSON::LD::JsonLdError::InvalidNestValue }, - "@nest MUST NOT have a value object value": { + '@nest MUST NOT have a value object value': { input: %({ "@context": {"@vocab": "http://example.org/"}, "@nest": {"@value": "This should generate an error"} }), exception: JSON::LD::JsonLdError::InvalidNestValue }, - "@nest in term definition MUST NOT be a non-@nest keyword": { + '@nest in term definition MUST NOT be a non-@nest keyword': { input: %({ 
"@context": { "@vocab": "http://example.org/", @@ -2756,7 +2757,7 @@ }), exception: JSON::LD::JsonLdError::InvalidNestValue }, - "@nest in term definition MUST NOT have a boolen value": { + '@nest in term definition MUST NOT have a boolen value': { input: %({ "@context": { "@vocab": "http://example.org/", @@ -2766,7 +2767,7 @@ }), exception: JSON::LD::JsonLdError::InvalidNestValue }, - "@nest in term definition MUST NOT have a numeric value": { + '@nest in term definition MUST NOT have a numeric value': { input: %({ "@context": { "@vocab": "http://example.org/", @@ -2776,7 +2777,7 @@ }), exception: JSON::LD::JsonLdError::InvalidNestValue }, - "Nested @container: @list": { + 'Nested @container: @list': { input: %({ "@context": { "@vocab": "http://example.org/", @@ -2794,7 +2795,7 @@ ]}] }]) }, - "Nested @container: @index": { + 'Nested @container: @index': { input: %({ "@context": { "@vocab": "http://example.org/", @@ -2815,7 +2816,7 @@ ] }]) }, - "Nested @container: @language": { + 'Nested @container: @language': { input: %({ "@context": { "@vocab": "http://example.org/", @@ -2836,7 +2837,7 @@ ] }]) }, - "Nested @container: @type": { + 'Nested @container: @type': { input: %({ "@context": { "@vocab": "http://example/", @@ -2857,7 +2858,7 @@ ] }]) }, - "Nested @container: @id": { + 'Nested @container: @id': { input: %({ "@context": { "@vocab": "http://example/", @@ -2878,7 +2879,7 @@ ] }]) }, - "Nest term an invalid keyword": { + 'Nest term an invalid keyword': { input: %({ "@context": { "term": {"@id": "http://example/term", "@nest": "@id"} @@ -2886,7 +2887,7 @@ }), exception: JSON::LD::JsonLdError::InvalidNestValue }, - "Nest in @reverse": { + 'Nest in @reverse': { input: %({ "@context": { "term": {"@reverse": "http://example/term", "@nest": "@nest"} @@ -2894,7 +2895,7 @@ }), exception: JSON::LD::JsonLdError::InvalidReverseProperty }, - "Raises InvalidTermDefinition if processingMode is 1.0": { + 'Raises InvalidTermDefinition if processingMode is 1.0': { input: %({ 
"@context": { "@vocab": "http://example.org/", @@ -2909,7 +2910,7 @@ validate: true, exception: JSON::LD::JsonLdError::InvalidTermDefinition }, - "Applies property scoped contexts which are aliases of @nest": { + 'Applies property scoped contexts which are aliases of @nest': { input: %({ "@context": { "@version": 1.1, @@ -2930,13 +2931,13 @@ }]) } }.each do |title, params| - it(title) {run_expand({processingMode: "json-ld-1.1"}.merge(params))} + it(title) { run_expand({ processingMode: "json-ld-1.1" }.merge(params)) } end end context "scoped context" do { - "adding new term": { + 'adding new term': { input: %({ "@context": { "@vocab": "http://example/", @@ -2952,7 +2953,7 @@ } ]) }, - "overriding a term": { + 'overriding a term': { input: %({ "@context": { "@vocab": "http://example/", @@ -2969,7 +2970,7 @@ } ]) }, - "property and value with different terms mapping to the same expanded property": { + 'property and value with different terms mapping to the same expanded property': { input: %({ "@context": { "@vocab": "http://example/", @@ -2989,7 +2990,7 @@ } ]) }, - "deep @context affects nested nodes": { + 'deep @context affects nested nodes': { input: %({ "@context": { "@vocab": "http://example/", @@ -3011,7 +3012,7 @@ } ]) }, - "scoped context layers on intemediate contexts": { + 'scoped context layers on intemediate contexts': { input: %({ "@context": { "@vocab": "http://example/", @@ -3038,7 +3039,7 @@ "http://example/c": [{"@value": "C in example"}] }]) }, - "Raises InvalidTermDefinition if processingMode is 1.0": { + 'Raises InvalidTermDefinition if processingMode is 1.0': { input: %({ "@context": { "@vocab": "http://example/", @@ -3052,7 +3053,7 @@ validate: true, exception: JSON::LD::JsonLdError::InvalidTermDefinition }, - "Scoped on id map": { + 'Scoped on id map': { input: %({ "@context": { "@version": 1.1, @@ -3102,13 +3103,13 @@ }]) } }.each do |title, params| - it(title) {run_expand({processingMode: "json-ld-1.1"}.merge(params))} + it(title) { 
run_expand({ processingMode: "json-ld-1.1" }.merge(params)) } end end context "scoped context on @type" do { - "adding new term": { + 'adding new term': { input: %({ "@context": { "@vocab": "http://example/", @@ -3125,7 +3126,7 @@ } ]) }, - "overriding a term": { + 'overriding a term': { input: %({ "@context": { "@vocab": "http://example/", @@ -3143,7 +3144,7 @@ } ]) }, - "alias of @type": { + 'alias of @type': { input: %({ "@context": { "@vocab": "http://example/", @@ -3161,7 +3162,7 @@ } ]) }, - "deep @context does not affect nested nodes": { + 'deep @context does not affect nested nodes': { input: %({ "@context": { "@vocab": "http://example/", @@ -3179,7 +3180,7 @@ } ]) }, - "scoped context layers on intemediate contexts": { + 'scoped context layers on intemediate contexts': { input: %({ "@context": { "@vocab": "http://example/", @@ -3202,7 +3203,7 @@ "http://example/c": [{"@value": "C in example"}] }]) }, - "with @container: @type": { + 'with @container: @type': { input: %({ "@context": { "@vocab": "http://example/", @@ -3219,7 +3220,7 @@ ] }]) }, - "orders lexicographically": { + 'orders lexicographically': { input: %({ "@context": { "@vocab": "http://example/", @@ -3236,7 +3237,7 @@ ] }]) }, - "Raises InvalidTermDefinition if processingMode is 1.0": { + 'Raises InvalidTermDefinition if processingMode is 1.0': { input: %({ "@context": { "@vocab": "http://example/", @@ -3247,15 +3248,15 @@ processingMode: 'json-ld-1.0', validate: true, exception: JSON::LD::JsonLdError::InvalidTermDefinition - }, + } }.each do |title, params| - it(title) {run_expand({processingMode: "json-ld-1.1"}.merge(params))} + it(title) { run_expand({ processingMode: "json-ld-1.1" }.merge(params)) } end end context "@reverse" do { - "@container: @reverse": { + '@container: @reverse': { input: %({ "@context": { "@vocab": "http://example/", @@ -3275,7 +3276,7 @@ } }]) }, - "expand-0037": { + 'expand-0037': { input: %({ "@context": { "name": "http://xmlns.com/foaf/0.1/name" @@ -3312,7 +3313,7 
@@ } ]) }, - "expand-0043": { + 'expand-0043': { input: %({ "@context": { "name": "http://xmlns.com/foaf/0.1/name", @@ -3362,16 +3363,16 @@ } ]) }, - "@reverse object with an @id property": { + '@reverse object with an @id property': { input: %({ "@id": "http://example/foo", "@reverse": { "@id": "http://example/bar" } }), - exception: JSON::LD::JsonLdError::InvalidReversePropertyMap, + exception: JSON::LD::JsonLdError::InvalidReversePropertyMap }, - "Explicit and implicit @reverse in same object": { + 'Explicit and implicit @reverse in same object': { input: %({ "@context": { "fooOf": {"@reverse": "ex:foo", "@type": "@id"} @@ -3390,7 +3391,7 @@ } }]) }, - "Two properties both with @reverse": { + 'Two properties both with @reverse': { input: %({ "@context": { "fooOf": {"@reverse": "ex:foo", "@type": "@id"}, @@ -3407,15 +3408,15 @@ "ex:foo": [{"@id": "ex:o1"}] } }]) - }, + } }.each do |title, params| - it(title) {run_expand params} + it(title) { run_expand params } end end context "JSON-LD-star" do { - "node with embedded subject without rdfstar option": { + 'node with embedded subject without rdfstar option': { input: %({ "@id": { "@id": "ex:rei", @@ -3425,7 +3426,7 @@ }), exception: JSON::LD::JsonLdError::InvalidIdValue }, - "node object with @annotation property is ignored without rdfstar option": { + 'node object with @annotation property is ignored without rdfstar option': { input: %({ "@id": "ex:bob", "ex:knows": { @@ -3440,7 +3441,7 @@ "ex:knows": [{"@id": "ex:fred"}] }]) }, - "value object with @annotation property is ignored without rdfstar option": { + 'value object with @annotation property is ignored without rdfstar option': { input: %({ "@id": "ex:bob", "ex:age": { @@ -3454,13 +3455,13 @@ "@id": "ex:bob", "ex:age": [{"@value": 23}] }]) - }, + } }.each do |title, params| - it(title) {run_expand params} + it(title) { run_expand params } end { - "node with embedded subject having no @id": { + 'node with embedded subject having no @id': { input: %({ "@id": { 
"ex:prop": "value" @@ -3474,7 +3475,7 @@ "ex:prop": [{"@value": "value2"}] }]) }, - "node with embedded subject having IRI @id": { + 'node with embedded subject having IRI @id': { input: %({ "@id": { "@id": "ex:rei", @@ -3490,7 +3491,7 @@ "ex:prop": [{"@value": "value2"}] }]) }, - "node with embedded subject having BNode @id": { + 'node with embedded subject having BNode @id': { input: %({ "@id": { "@id": "_:rei", @@ -3506,7 +3507,7 @@ "ex:prop": [{"@value": "value2"}] }]) }, - "node with embedded subject having a type": { + 'node with embedded subject having a type': { input: %({ "@id": { "@id": "ex:rei", @@ -3522,7 +3523,7 @@ "ex:prop": [{"@value": "value2"}] }]) }, - "node with embedded subject having an IRI value": { + 'node with embedded subject having an IRI value': { input: %({ "@id": { "@id": "ex:rei", @@ -3538,7 +3539,7 @@ "ex:prop": [{"@value": "value2"}] }]) }, - "node with embedded subject having an BNode value": { + 'node with embedded subject having an BNode value': { input: %({ "@id": { "@id": "ex:rei", @@ -3554,7 +3555,7 @@ "ex:prop": [{"@value": "value2"}] }]) }, - "node with recursive embedded subject": { + 'node with recursive embedded subject': { input: %({ "@id": { "@id": { @@ -3576,7 +3577,7 @@ "ex:prop": [{"@value": "value2"}] }]) }, - "illegal node with subject having no property": { + 'illegal node with subject having no property': { input: %({ "@id": { "@id": "ex:rei" @@ -3585,7 +3586,7 @@ }), exception: JSON::LD::JsonLdError::InvalidEmbeddedNode }, - "illegal node with subject having multiple properties": { + 'illegal node with subject having multiple properties': { input: %({ "@id": { "@id": "ex:rei", @@ -3595,7 +3596,7 @@ }), exception: JSON::LD::JsonLdError::InvalidEmbeddedNode }, - "illegal node with subject having multiple types": { + 'illegal node with subject having multiple types': { input: %({ "@id": { "@id": "ex:rei", @@ -3605,7 +3606,7 @@ }), exception: JSON::LD::JsonLdError::InvalidEmbeddedNode }, - "illegal node with subject 
having type and property": { + 'illegal node with subject having type and property': { input: %({ "@id": { "@id": "ex:rei", @@ -3616,7 +3617,7 @@ }), exception: JSON::LD::JsonLdError::InvalidEmbeddedNode }, - "node with embedded object": { + 'node with embedded object': { input: %({ "@id": "ex:subj", "ex:value": { @@ -3636,7 +3637,7 @@ }] }]) }, - "node with embedded object having properties": { + 'node with embedded object having properties': { input: %({ "@id": "ex:subj", "ex:value": { @@ -3658,7 +3659,7 @@ }] }]) }, - "node with recursive embedded object": { + 'node with recursive embedded object': { input: %({ "@id": "ex:subj", "ex:value": { @@ -3686,7 +3687,7 @@ }] }]) }, - "node with @annotation property on value object": { + 'node with @annotation property on value object': { input: %({ "@id": "ex:bob", "ex:age": { @@ -3702,7 +3703,7 @@ }] }]) }, - "node with @annotation property on node object": { + 'node with @annotation property on node object': { input: %({ "@id": "ex:bob", "ex:name": "Bob", @@ -3722,7 +3723,7 @@ }] }]) }, - "node with @annotation property multiple values": { + 'node with @annotation property multiple values': { input: %({ "@id": "ex:bob", "ex:name": "Bob", @@ -3750,7 +3751,7 @@ }] }]) }, - "node with @annotation property that is on the top-level is invalid": { + 'node with @annotation property that is on the top-level is invalid': { input: %({ "@id": "ex:bob", "ex:name": "Bob", @@ -3758,7 +3759,7 @@ }), exception: JSON::LD::JsonLdError::InvalidAnnotation }, - "node with @annotation property on a top-level graph node is invalid": { + 'node with @annotation property on a top-level graph node is invalid': { input: %({ "@id": "ex:bob", "ex:name": "Bob", @@ -3770,7 +3771,7 @@ }), exception: JSON::LD::JsonLdError::InvalidAnnotation }, - "node with @annotation property having @id is invalid": { + 'node with @annotation property having @id is invalid': { input: %({ "@id": "ex:bob", "ex:knows": { @@ -3783,7 +3784,7 @@ }), exception: 
JSON::LD::JsonLdError::InvalidAnnotation }, - "node with @annotation property with value object value is invalid": { + 'node with @annotation property with value object value is invalid': { input: %({ "@id": "ex:bob", "ex:knows": { @@ -3793,7 +3794,7 @@ }), exception: JSON::LD::JsonLdError::InvalidAnnotation }, - "node with @annotation on a list": { + 'node with @annotation on a list': { input: %({ "@id": "ex:bob", "ex:knows": { @@ -3803,7 +3804,7 @@ }), exception: JSON::LD::JsonLdError::InvalidSetOrListObject }, - "node with @annotation on a list value": { + 'node with @annotation on a list value': { input: %({ "@id": "ex:bob", "ex:knows": { @@ -3817,7 +3818,7 @@ }), exception: JSON::LD::JsonLdError::InvalidAnnotation }, - "node with @annotation property on a top-level @included node is invalid": { + 'node with @annotation property on a top-level @included node is invalid': { input: %({ "@id": "ex:bob", "ex:name": "Bob", @@ -3829,7 +3830,7 @@ }), exception: JSON::LD::JsonLdError::InvalidAnnotation }, - "node with @annotation property on embedded subject": { + 'node with @annotation property on embedded subject': { input: %({ "@id": { "@id": "ex:rei", @@ -3853,7 +3854,7 @@ }] }]) }, - "node with @annotation property on embedded object": { + 'node with @annotation property on embedded object': { input: %({ "@id": "ex:subj", "ex:value": { @@ -3877,7 +3878,7 @@ }] }]) }, - "embedded node with reverse relationship": { + 'embedded node with reverse relationship': { input: %({ "@context": { "rel": {"@reverse": "ex:rel"} @@ -3890,7 +3891,7 @@ }), exception: JSON::LD::JsonLdError::InvalidEmbeddedNode }, - "embedded node with expanded reverse relationship": { + 'embedded node with expanded reverse relationship': { input: %({ "@id": { "@id": "ex:rei", @@ -3902,7 +3903,7 @@ }), exception: JSON::LD::JsonLdError::InvalidEmbeddedNode }, - "embedded node used as subject in reverse relationship": { + 'embedded node used as subject in reverse relationship': { input: %({ "@context": 
{ "rel": {"@reverse": "ex:rel"} @@ -3923,7 +3924,7 @@ } }]) }, - "embedded node used as object in reverse relationship": { + 'embedded node used as object in reverse relationship': { input: %({ "@context": { "rel": {"@reverse": "ex:rel"} @@ -3950,7 +3951,7 @@ } }]) }, - "node with @annotation property on node object with reverse relationship": { + 'node with @annotation property on node object with reverse relationship': { input: %({ "@context": { "knownBy": {"@reverse": "ex:knows"} @@ -3975,7 +3976,7 @@ } }]) }, - "reverse relationship inside annotation": { + 'reverse relationship inside annotation': { input: %({ "@context": { "claims": {"@reverse": "ex:claims", "@type": "@id"} @@ -4001,9 +4002,9 @@ }] }] }]) - }, + } }.each do |title, params| - it(title) {run_expand params.merge(rdfstar: true)} + it(title) { run_expand params.merge(rdfstar: true) } end end @@ -4014,13 +4015,14 @@ require 'rexml/document' context "html" do - %w(Nokogiri REXML).each do |impl| + %w[Nokogiri REXML].each do |impl| next unless Module.constants.map(&:to_s).include?(impl) + context impl do - let(:library) {impl.downcase.to_s.to_sym} + let(:library) { impl.downcase.to_s.to_sym } { - "Expands embedded JSON-LD script element": { + 'Expands embedded JSON-LD script element': { input: %( @@ -4038,7 +4040,7 @@ "http://example.com/foo": [{"@list": [{"@value": "bar"}]}] }]) }, - "Expands first script element": { + 'Expands first script element': { input: %( @@ -4065,7 +4067,7 @@ "http://example.com/foo": [{"@list": [{"@value": "bar"}]}] }]) }, - "Expands targeted script element": { + 'Expands targeted script element': { input: %( @@ -4094,7 +4096,7 @@ ]), base: "http://example.org/doc#second" }, - "Expands all script elements with extractAllScripts option": { + 'Expands all script elements with extractAllScripts option': { input: %( @@ -4129,7 +4131,7 @@ ]), extractAllScripts: true }, - "Expands multiple scripts where one is an array": { + 'Expands multiple scripts where one is an array': { 
input: %( @@ -4156,16 +4158,16 @@ ]), extractAllScripts: true }, - "Errors no script element": { + 'Errors no script element': { input: %(), exception: JSON::LD::JsonLdError::LoadingDocumentFailed }, - "Expands as empty with no script element and extractAllScripts": { + 'Expands as empty with no script element and extractAllScripts': { input: %(), output: %([]), extractAllScripts: true }, - "Expands script element with HTML character references": { + 'Expands script element with HTML character references': { input: %( @@ -4181,7 +4183,7 @@ "http://example/foo": [{"@value": "<&>"}] }]) }, - "Expands embedded JSON-LD script element relative to document base": { + 'Expands embedded JSON-LD script element relative to document base': { input: %( @@ -4202,7 +4204,7 @@ }]), base: "http://example.org/doc" }, - "Expands embedded JSON-LD script element relative to HTML base": { + 'Expands embedded JSON-LD script element relative to HTML base': { input: %( @@ -4224,7 +4226,7 @@ }]), base: "http://example.org/doc" }, - "Expands embedded JSON-LD script element relative to relative HTML base": { + 'Expands embedded JSON-LD script element relative to relative HTML base': { input: %( @@ -4246,7 +4248,7 @@ }]), base: "http://example.org/doc" }, - "Errors if no element found at target": { + 'Errors if no element found at target': { input: %( @@ -4272,7 +4274,7 @@ base: "http://example.org/doc#third", exception: JSON::LD::JsonLdError::LoadingDocumentFailed }, - "Errors if targeted element is not a script element": { + 'Errors if targeted element is not a script element': { input: %( @@ -4289,7 +4291,7 @@ base: "http://example.org/doc#first", exception: JSON::LD::JsonLdError::LoadingDocumentFailed }, - "Errors if targeted element does not have type application/ld+json": { + 'Errors if targeted element does not have type application/ld+json': { input: %( @@ -4306,7 +4308,7 @@ base: "http://example.org/doc#first", exception: JSON::LD::JsonLdError::LoadingDocumentFailed }, - "Errors if 
uncommented script text contains comment": { + 'Errors if uncommented script text contains comment': { input: %( @@ -4325,7 +4327,7 @@ exception: JSON::LD::JsonLdError::InvalidScriptElement, not: :rexml }, - "Errors if end comment missing": { + 'Errors if end comment missing': { input: %( @@ -4343,7 +4345,7 @@ exception: JSON::LD::JsonLdError::InvalidScriptElement, not: :rexml }, - "Errors if start comment missing": { + 'Errors if start comment missing': { input: %( @@ -4360,7 +4362,7 @@ ), exception: JSON::LD::JsonLdError::InvalidScriptElement }, - "Errors if uncommented script is not valid JSON": { + 'Errors if uncommented script is not valid JSON': { input: %( @@ -4370,12 +4372,12 @@ ), exception: JSON::LD::JsonLdError::InvalidScriptElement - }, + } }.each do |title, params| it(title) do skip "rexml" if params[:not] == library params = params.merge(input: StringIO.new(params[:input])) - params[:input].send(:define_singleton_method, :content_type) {"text/html"} + params[:input].send(:define_singleton_method, :content_type) { "text/html" } run_expand params.merge(validate: true, library: library) end end @@ -4385,7 +4387,7 @@ context "deprectaions" do { - "blank node property": { + 'blank node property': { input: %({"_:bn": "value"}), output: %([{"_:bn": [{"@value": "value"}]}]) } @@ -4402,22 +4404,22 @@ context "exceptions" do { - "non-null @value and null @type": { + 'non-null @value and null @type': { input: %({"http://example.com/foo": {"@value": "foo", "@type": null}}), exception: JSON::LD::JsonLdError::InvalidTypeValue }, - "non-null @value and null @language": { + 'non-null @value and null @language': { input: %({"http://example.com/foo": {"@value": "foo", "@language": null}}), exception: JSON::LD::JsonLdError::InvalidLanguageTaggedString }, - "value with null language": { + 'value with null language': { input: %({ "@context": {"@language": "en"}, "http://example.org/nolang": {"@value": "no language", "@language": null} }), exception: 
JSON::LD::JsonLdError::InvalidLanguageTaggedString }, - "colliding keywords": { + 'colliding keywords': { input: %({ "@context": { "id": "@id", @@ -4426,9 +4428,9 @@ "id": "http://example/foo", "ID": "http://example/bar" }), - exception: JSON::LD::JsonLdError::CollidingKeywords, + exception: JSON::LD::JsonLdError::CollidingKeywords }, - "@language and @type": { + '@language and @type': { input: %({ "ex:p": { "@value": "v", @@ -4439,7 +4441,7 @@ exception: JSON::LD::JsonLdError::InvalidValueObject, processingMode: 'json-ld-1.1' }, - "@direction and @type": { + '@direction and @type': { input: %({ "ex:p": { "@value": "v", @@ -4449,15 +4451,15 @@ }), exception: JSON::LD::JsonLdError::InvalidValueObject, processingMode: 'json-ld-1.1' - }, + } }.each do |title, params| - it(title) {run_expand params} + it(title) { run_expand params } end end context "problem cases" do { - "toRdf/0118": { + 'toRdf/0118': { input: %({ "@context": {"term": "_:term", "termId": { "@id": "term", "@type": "@id" }}, "termId": "term:AppendedToBlankNode" @@ -4465,27 +4467,28 @@ output: %([{ "_:term": [{"@id": "_:termAppendedToBlankNode"}] }]) - }, + } }.each do |title, params| - it(title) {run_expand params} + it(title) { run_expand params } end end end def run_expand(params) - input, output = params[:input], params[:output] + input = params[:input] + output = params[:output] params[:base] ||= nil - input = ::JSON.parse(input) if input.is_a?(String) - output = ::JSON.parse(output) if output.is_a?(String) + input = JSON.parse(input) if input.is_a?(String) + output = JSON.parse(output) if output.is_a?(String) pending params.fetch(:pending, "test implementation") unless input if params[:exception] - expect {JSON::LD::API.expand(input, **params)}.to raise_error(params[:exception]) + expect { JSON::LD::API.expand(input, **params) }.to raise_error(params[:exception]) else jld = nil if params[:write] - expect{jld = JSON::LD::API.expand(input, logger: logger, **params)}.to 
write(params[:write]).to(:error) + expect { jld = JSON::LD::API.expand(input, logger: logger, **params) }.to write(params[:write]).to(:error) else - expect{jld = JSON::LD::API.expand(input, logger: logger, **params)}.not_to write.to(:error) + expect { jld = JSON::LD::API.expand(input, logger: logger, **params) }.not_to write.to(:error) end expect(jld).to produce_jsonld(output, logger) diff --git a/spec/flatten_spec.rb b/spec/flatten_spec.rb index 1115e735..a8fefe3b 100644 --- a/spec/flatten_spec.rb +++ b/spec/flatten_spec.rb @@ -1,18 +1,19 @@ -# coding: utf-8 +# frozen_string_literal: true + require_relative 'spec_helper' describe JSON::LD::API do - let(:logger) {RDF::Spec.logger} + let(:logger) { RDF::Spec.logger } describe ".flatten" do { - "single object": { + 'single object': { input: %({"@id": "http://example.com", "@type": "http://www.w3.org/2000/01/rdf-schema#Resource"}), output: %([ {"@id": "http://example.com", "@type": ["http://www.w3.org/2000/01/rdf-schema#Resource"]} ]) }, - "embedded object": { + 'embedded object': { input: %({ "@context": { "foaf": "http://xmlns.com/foaf/0.1/" @@ -36,7 +37,7 @@ } ]) }, - "embedded anon": { + 'embedded anon': { input: %({ "@context": { "foaf": "http://xmlns.com/foaf/0.1/" @@ -59,7 +60,7 @@ } ]) }, - "reverse properties": { + 'reverse properties': { input: %([ { "@id": "http://example.com/people/markus", @@ -103,8 +104,8 @@ } ]) }, - "Simple named graph (Wikidata)": { - input: %q({ + 'Simple named graph (Wikidata)': { + input: '{ "@context": { "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", "ex": "http://example.org/", @@ -133,8 +134,8 @@ "ex:hasReference": "http://www.wikipedia.org/" } ] - }), - output: %q([{ + }', + output: '[{ "@id": "http://example.org/ParisFact1", "@type": ["http://www.w3.org/1999/02/22-rdf-syntax-ns#Graph"], "http://example.org/hasReference": [ @@ -154,10 +155,10 @@ "@id": "http://example.org/location/Paris#this", "http://example.org/hasPopulation": [{"@value": 7000000}] }] - }]), + }]' }, - 
"Test Manifest (shortened)": { - input: %q{ + 'Test Manifest (shortened)': { + input: ' { "@id": "", "http://example/sequence": {"@list": [ @@ -168,8 +169,8 @@ } ]} } - }, - output: %q{ + ', + output: ' [{ "@id": "", "http://example/sequence": [{"@list": [{"@id": "#t0001"}]}] @@ -178,10 +179,10 @@ "http://example/input": [{"@id": "error-expand-0001-in.jsonld"}], "http://example/name": [{"@value": "Keywords cannot be aliased to other keywords"}] }] - }, + ' }, - "@reverse bnode issue (0045)": { - input: %q{ + '@reverse bnode issue (0045)': { + input: ' { "@context": { "foo": "http://example.org/foo", @@ -190,8 +191,8 @@ "foo": "Foo", "bar": [ "http://example.org/origin", "_:b0" ] } - }, - output: %q{ + ', + output: ' [ { "@id": "_:b0", @@ -206,10 +207,10 @@ "http://example.org/bar": [ { "@id": "_:b0" } ] } ] - }, + ', remap_nodes: true }, - "@list with embedded object": { + '@list with embedded object': { input: %([{ "http://example.com/foo": [{ "@list": [{ @@ -235,16 +236,16 @@ } ]) }, - "coerced @list containing an deep list": { + 'coerced @list containing an deep list': { input: %([{ "http://example.com/foo": [{"@list": [{"@list": [{"@list": [{"@value": "baz"}]}]}]}] }]), output: %([{ "@id": "_:b0", "http://example.com/foo": [{"@list": [{"@list": [{"@list": [{"@value": "baz"}]}]}]}] - }]), + }]) }, - "@list containing empty @list": { + '@list containing empty @list': { input: %({ "http://example.com/foo": {"@list": [{"@list": []}]} }), @@ -253,7 +254,7 @@ "http://example.com/foo": [{"@list": [{"@list": []}]}] }]) }, - "coerced @list containing mixed list values": { + 'coerced @list containing mixed list values': { input: %({ "@context": {"foo": {"@id": "http://example.com/foo", "@container": "@list"}}, "foo": [ @@ -279,14 +280,14 @@ "http://example/Baz" ] }]) - }, + } }.each do |title, params| - it(title) {run_flatten(params)} + it(title) { run_flatten(params) } end context "@included" do { - "Basic Included array": { + 'Basic Included array': { input: %({ 
"@context": { "@version": 1.1, @@ -305,7 +306,7 @@ "http://example.org/prop": [{"@value": "value2"}] }]) }, - "Basic Included object": { + 'Basic Included object': { input: %({ "@context": { "@version": 1.1, @@ -324,7 +325,7 @@ "http://example.org/prop": [{"@value": "value2"}] }]) }, - "Multiple properties mapping to @included are folded together": { + 'Multiple properties mapping to @included are folded together': { input: %({ "@context": { "@version": 1.1, @@ -343,7 +344,7 @@ "http://example.org/prop": [{"@value": "value2"}] }]) }, - "Included containing @included": { + 'Included containing @included': { input: %({ "@context": { "@version": 1.1, @@ -368,7 +369,7 @@ "http://example.org/prop": [{"@value": "value3"}] }]) }, - "Property value with @included": { + 'Property value with @included': { input: %({ "@context": { "@version": 1.1, @@ -394,7 +395,7 @@ "@type": ["http://example.org/Bar"] }]) }, - "json.api example": { + 'json.api example': { input: %({ "@context": { "@version": 1.1, @@ -525,16 +526,16 @@ ], "http://example.org/vocab#related": [{"@id": "http://example.com/articles/1/author"}] }]) - }, + } }.each do |title, params| - it(title) {run_flatten(params)} + it(title) { run_flatten(params) } end end end context "html" do { - "Flattens embedded JSON-LD script element": { + 'Flattens embedded JSON-LD script element': { input: %( @@ -556,7 +557,7 @@ "@graph": [{"@id": "_:b0","foo": ["bar"]}] }) }, - "Flattens first script element with extractAllScripts: false": { + 'Flattens first script element with extractAllScripts: false': { input: %( @@ -588,7 +589,7 @@ }), extractAllScripts: false }, - "Flattens targeted script element": { + 'Flattens targeted script element': { input: %( @@ -621,7 +622,7 @@ }), base: "http://example.org/doc#second" }, - "Flattens all script elements by default": { + 'Flattens all script elements by default': { input: %( @@ -656,11 +657,11 @@ {"@id": "_:b2", "ex:bar": "bar"} ] }) - }, + } }.each do |title, params| it(title) do 
params[:input] = StringIO.new(params[:input]) - params[:input].send(:define_singleton_method, :content_type) {"text/html"} + params[:input].send(:define_singleton_method, :content_type) { "text/html" } run_flatten params.merge(validate: true) end end @@ -668,7 +669,7 @@ context "JSON-LD-star" do { - "node object with @annotation property is ignored without rdfstar option": { + 'node object with @annotation property is ignored without rdfstar option': { input: %({ "@id": "ex:bob", "ex:knows": { @@ -683,7 +684,7 @@ "ex:knows": [{"@id": "ex:fred"}] }]) }, - "value object with @annotation property is ignored without rdfstar option": { + 'value object with @annotation property is ignored without rdfstar option': { input: %({ "@id": "ex:bob", "ex:age": { @@ -697,13 +698,13 @@ "@id": "ex:bob", "ex:age": [{"@value": 23}] }]) - }, + } }.each do |title, params| - it(title) {run_flatten params} + it(title) { run_flatten params } end { - "node with embedded subject having no @id": { + 'node with embedded subject having no @id': { input: %({ "@id": { "ex:prop": "value" @@ -717,7 +718,7 @@ "ex:prop": [{"@value": "value2"}] }]) }, - "node with embedded subject having IRI @id": { + 'node with embedded subject having IRI @id': { input: %({ "@id": { "@id": "ex:rei", @@ -733,7 +734,7 @@ "ex:prop": [{"@value": "value2"}] }]) }, - "node with embedded subject having BNode @id": { + 'node with embedded subject having BNode @id': { input: %({ "@id": { "@id": "_:rei", @@ -749,7 +750,7 @@ "ex:prop": [{"@value": "value2"}] }]) }, - "node with embedded subject having a type": { + 'node with embedded subject having a type': { input: %({ "@id": { "@id": "ex:rei", @@ -765,7 +766,7 @@ "ex:prop": [{"@value": "value2"}] }]) }, - "node with embedded subject having an IRI value": { + 'node with embedded subject having an IRI value': { input: %({ "@id": { "@id": "ex:rei", @@ -781,7 +782,7 @@ "ex:prop": [{"@value": "value2"}] }]) }, - "node with embedded subject having an BNode value": { + 'node with 
embedded subject having an BNode value': { input: %({ "@id": { "@id": "ex:rei", @@ -797,7 +798,7 @@ "ex:prop": [{"@value": "value2"}] }]) }, - "node with recursive embedded subject": { + 'node with recursive embedded subject': { input: %({ "@id": { "@id": { @@ -819,7 +820,7 @@ "ex:prop": [{"@value": "value2"}] }]) }, - "node with embedded object": { + 'node with embedded object': { input: %({ "@id": "ex:subj", "ex:value": { @@ -839,7 +840,7 @@ }] }]) }, - "node with embedded object having properties": { + 'node with embedded object having properties': { input: %({ "@id": "ex:subj", "ex:value": { @@ -866,7 +867,7 @@ "ex:prop": [{"@value": "value2"}] }]) }, - "node with recursive embedded object": { + 'node with recursive embedded object': { input: %({ "@id": "ex:subj", "ex:value": { @@ -902,7 +903,7 @@ "ex:prop": [{"@value": "value2"}] }]) }, - "node with @annotation property on value object": { + 'node with @annotation property on value object': { input: %({ "@id": "ex:bob", "ex:age": { @@ -921,7 +922,7 @@ "ex:certainty": [{"@value": 0.8}] }]) }, - "node with @annotation property on node object": { + 'node with @annotation property on node object': { input: %({ "@id": "ex:bob", "ex:name": "Bob", @@ -946,7 +947,7 @@ "ex:certainty": [{"@value": 0.8}] }]) }, - "node with @annotation property multiple values": { + 'node with @annotation property multiple values': { input: %({ "@id": "ex:bob", "ex:name": "Bob", @@ -976,7 +977,7 @@ "ex:source": [{"@id": "http://example.org/"}] }]) }, - "node with @annotation property on embedded subject": { + 'node with @annotation property on embedded subject': { input: %({ "@id": { "@id": "ex:rei", @@ -1004,7 +1005,7 @@ "ex:certainty": [{"@value": 0.8}] }]) }, - "node with @annotation property on embedded object": { + 'node with @annotation property on embedded object': { input: %({ "@id": "ex:subj", "ex:value": { @@ -1036,7 +1037,7 @@ "ex:certainty": [{"@value": 0.8}] }]) }, - "embedded node used as subject in reverse relationship": { 
+ 'embedded node used as subject in reverse relationship': { input: %({ "@context": { "rel": {"@reverse": "ex:rel"} @@ -1057,7 +1058,7 @@ }] }]) }, - "embedded node used as object in reverse relationship": { + 'embedded node used as object in reverse relationship': { input: %({ "@context": { "rel": {"@reverse": "ex:rel"} @@ -1080,7 +1081,7 @@ "ex:prop": [{"@id": "ex:value2"}] }]) }, - "node with @annotation property on node object with reverse relationship": { + 'node with @annotation property on node object with reverse relationship': { input: %({ "@context": { "knownBy": {"@reverse": "ex:knows"} @@ -1108,7 +1109,7 @@ "ex:certainty": [{"@value": 0.8}] }]) }, - "reverse relationship inside annotation": { + 'reverse relationship inside annotation': { input: %({ "@context": { "claims": {"@reverse": "ex:claims", "@type": "@id"} @@ -1141,7 +1142,7 @@ }] }]) }, - "embedded node with annotation on value object": { + 'embedded node with annotation on value object': { input: %({ "@context": { "@base": "http://example.org/", @@ -1175,25 +1176,29 @@ }]) } }.each do |title, params| - it(title) {run_flatten params.merge(rdfstar: true)} + it(title) { run_flatten params.merge(rdfstar: true) } end end def run_flatten(params) - input, output, context = params[:input], params[:output], params[:context] - input = ::JSON.parse(input) if input.is_a?(String) - output = ::JSON.parse(output) if output.is_a?(String) - context = ::JSON.parse(context) if context.is_a?(String) + input = params[:input] + output = params[:output] + context = params[:context] + input = JSON.parse(input) if input.is_a?(String) + output = JSON.parse(output) if output.is_a?(String) + context = JSON.parse(context) if context.is_a?(String) params[:base] ||= nil pending params.fetch(:pending, "test implementation") unless input if params[:exception] - expect {JSON::LD::API.flatten(input, context, logger: logger, **params)}.to raise_error(params[:exception]) + expect { JSON::LD::API.flatten(input, context, logger: 
logger, **params) }.to raise_error(params[:exception]) else jld = nil if params[:write] - expect{jld = JSON::LD::API.flatten(input, context, logger: logger, **params)}.to write(params[:write]).to(:error) + expect do + jld = JSON::LD::API.flatten(input, context, logger: logger, **params) + end.to write(params[:write]).to(:error) else - expect{jld = JSON::LD::API.flatten(input, context, logger: logger, **params)}.not_to write.to(:error) + expect { jld = JSON::LD::API.flatten(input, context, logger: logger, **params) }.not_to write.to(:error) end jld = remap_bnodes(jld, output) if params[:remap_nodes] diff --git a/spec/format_spec.rb b/spec/format_spec.rb index f23cb975..179ff418 100644 --- a/spec/format_spec.rb +++ b/spec/format_spec.rb @@ -1,20 +1,21 @@ -# coding: utf-8 +# frozen_string_literal: true + require_relative 'spec_helper' require 'rdf/spec/format' describe JSON::LD::Format do it_behaves_like 'an RDF::Format' do - let(:format_class) {JSON::LD::Format} + let(:format_class) { described_class } end describe ".for" do formats = [ :jsonld, "etc/doap.jsonld", - {file_name: 'etc/doap.jsonld'}, - {file_extension: 'jsonld'}, - {content_type: 'application/ld+json'}, - {content_type: 'application/x-ld+json'}, + { file_name: 'etc/doap.jsonld' }, + { file_extension: 'jsonld' }, + { content_type: 'application/ld+json' }, + { content_type: 'application/x-ld+json' } ].each do |arg| it "discovers with #{arg.inspect}" do expect(RDF::Format.for(arg)).to eq described_class @@ -22,32 +23,32 @@ end { - jsonld: '{"@context" => "foo"}', - context: %({\n"@context": {), - id: %({\n"@id": {), - type: %({\n"@type": {), + jsonld: '{"@context" => "foo"}', + context: %({\n"@context": {), + id: %({\n"@id": {), + type: %({\n"@type": {) }.each do |sym, str| it "detects #{sym}" do - expect(described_class.for {str}).to eq described_class + expect(described_class.for { str }).to eq described_class end end - it "should discover 'jsonld'" do + it "discovers 'jsonld'" do 
expect(RDF::Format.for(:jsonld).reader).to eq JSON::LD::Reader end end describe "#to_sym" do - specify {expect(described_class.to_sym).to eq :jsonld} + specify { expect(described_class.to_sym).to eq :jsonld } end describe "#to_uri" do - specify {expect(described_class.to_uri).to eq RDF::URI('http://www.w3.org/ns/formats/JSON-LD')} + specify { expect(described_class.to_uri).to eq RDF::URI('http://www.w3.org/ns/formats/JSON-LD') } end describe ".detect" do { - jsonld: '{"@context" => "foo"}', + jsonld: '{"@context" => "foo"}' }.each do |sym, str| it "detects #{sym}" do expect(described_class.detect(str)).to be_truthy @@ -55,14 +56,14 @@ end { - n3: "@prefix foo: .\nfoo:bar = { } .", - nquads: " . ", - rdfxml: '', - rdfa: '
', - microdata: '
', - ntriples: "
.", + n3: "@prefix foo: .\nfoo:bar = { } .", + nquads: " . ", + rdfxml: '', + rdfa: '
', + microdata: '
', + ntriples: "
.", multi_line: '\n \n "literal"\n .', - turtle: "@prefix foo: .\n foo:a foo:b .", + turtle: "@prefix foo: .\n foo:a foo:b ." }.each do |sym, str| it "does not detect #{sym}" do expect(described_class.detect(str)).to be_falsey @@ -72,43 +73,66 @@ describe ".cli_commands", skip: Gem.win_platform? do require 'rdf/cli' - let(:ttl) {File.expand_path("../test-files/test-1-rdf.ttl", __FILE__)} - let(:json) {File.expand_path("../test-files/test-1-input.jsonld", __FILE__)} - let(:context) {File.expand_path("../test-files/test-1-context.jsonld", __FILE__)} + let(:ttl) { File.expand_path('test-files/test-1-rdf.ttl', __dir__) } + let(:json) { File.expand_path('test-files/test-1-input.jsonld', __dir__) } + let(:context) { File.expand_path('test-files/test-1-context.jsonld', __dir__) } describe "#expand" do it "expands RDF" do - expect {RDF::CLI.exec(["expand", ttl], format: :ttl, output_format: :jsonld)}.to write.to(:output) + expect { RDF::CLI.exec(["expand", ttl], format: :ttl, output_format: :jsonld) }.to write.to(:output) end + it "expands JSON" do - expect {RDF::CLI.exec(["expand", json], format: :jsonld, output_format: :jsonld, validate: false)}.to write.to(:output) + expect do + RDF::CLI.exec(["expand", json], format: :jsonld, output_format: :jsonld, validate: false) + end.to write.to(:output) end end describe "#compact" do it "compacts RDF" do - expect {RDF::CLI.exec(["compact", ttl], context: context, format: :ttl, output_format: :jsonld, validate: false)}.to write.to(:output) + expect do + RDF::CLI.exec(["compact", ttl], context: context, format: :ttl, output_format: :jsonld, + validate: false) + end.to write.to(:output) end + it "compacts JSON" do - expect {RDF::CLI.exec(["compact", json], context: context, format: :jsonld, output_format: :jsonld, validate: false)}.to write.to(:output) + expect do + RDF::CLI.exec(["compact", json], context: context, format: :jsonld, output_format: :jsonld, + validate: false) + end.to write.to(:output) end end describe "#flatten" do 
it "flattens RDF" do - expect {RDF::CLI.exec(["flatten", ttl], context: context, format: :ttl, output_format: :jsonld, validate: false)}.to write.to(:output) + expect do + RDF::CLI.exec(["flatten", ttl], context: context, format: :ttl, output_format: :jsonld, + validate: false) + end.to write.to(:output) end + it "flattens JSON" do - expect {RDF::CLI.exec(["flatten", json], context: context, format: :jsonld, output_format: :jsonld, validate: false)}.to write.to(:output) + expect do + RDF::CLI.exec(["flatten", json], context: context, format: :jsonld, output_format: :jsonld, + validate: false) + end.to write.to(:output) end end describe "#frame" do it "frames RDF" do - expect {RDF::CLI.exec(["frame", ttl], frame: context, format: :ttl, output_format: :jsonld)}.to write.to(:output) + expect do + RDF::CLI.exec(["frame", ttl], frame: context, format: :ttl, output_format: :jsonld) + end.to write.to(:output) end + it "frames JSON" do - expect {RDF::CLI.exec(["frame", json], frame: context, format: :jsonld, output_format: :jsonld, validate: false)}.to write.to(:output) + expect do + RDF::CLI.exec(["frame", json], frame: context, format: :jsonld, output_format: :jsonld, + validate: false) + end.to write.to(:output) end end end diff --git a/spec/frame_spec.rb b/spec/frame_spec.rb index 8f3ad7f5..72ed82cd 100644 --- a/spec/frame_spec.rb +++ b/spec/frame_spec.rb @@ -1,12 +1,13 @@ -# coding: utf-8 +# frozen_string_literal: true + require_relative 'spec_helper' describe JSON::LD::API do - let(:logger) {RDF::Spec.logger} + let(:logger) { RDF::Spec.logger } describe ".frame" do { - "exact @type match": { + 'exact @type match': { frame: %({ "@context": {"ex": "http://example.org/"}, "@type": "ex:Type1" @@ -30,7 +31,7 @@ }] }) }, - "wildcard @type match": { + 'wildcard @type match': { frame: %({ "@context": {"ex": "http://example.org/"}, "@type": {} @@ -57,7 +58,7 @@ }] }) }, - "match none @type match": { + 'match none @type match': { frame: %({ "@context": {"ex": 
"http://example.org/"}, "@type": [] @@ -82,7 +83,7 @@ }] }) }, - "multiple matches on @type": { + 'multiple matches on @type': { frame: %({ "@context": {"ex": "http://example.org/"}, "@type": "ex:Type1" @@ -114,7 +115,7 @@ }] }) }, - "single @id match": { + 'single @id match': { frame: %({ "@context": {"ex": "http://example.org/"}, "@id": "ex:Sub1" @@ -138,7 +139,7 @@ }] }) }, - "multiple @id match": { + 'multiple @id match': { frame: %({ "@context": {"ex": "http://example.org/"}, "@id": ["ex:Sub1", "ex:Sub2"] @@ -169,7 +170,7 @@ }] }) }, - "wildcard and match none": { + 'wildcard and match none': { frame: %({ "@context": {"ex": "http://example.org/"}, "ex:p": [], @@ -196,7 +197,7 @@ }] }) }, - "match on any property if @requireAll is false": { + 'match on any property if @requireAll is false': { frame: %({ "@context": {"ex": "http://example.org/"}, "@requireAll": false, @@ -227,7 +228,7 @@ }] }) }, - "match on defeaults if @requireAll is true and at least one property matches": { + 'match on defeaults if @requireAll is true and at least one property matches': { frame: %({ "@context": {"ex": "http://example.org/"}, "@requireAll": true, @@ -271,7 +272,7 @@ }] }) }, - "match with @requireAll with one default": { + 'match with @requireAll with one default': { frame: %({ "@context": {"ex": "http://example.org/"}, "@requireAll": true, @@ -372,7 +373,7 @@ }] }) }, - "issue #40 - example": { + 'issue #40 - example': { frame: %({ "@context": { "@version": 1.1, @@ -417,18 +418,18 @@ }), processingMode: 'json-ld-1.1' }, - "implicitly includes unframed properties (default @explicit false)": { + 'implicitly includes unframed properties (default @explicit false)': { frame: %({ "@context": {"ex": "http://example.org/"}, "@type": "ex:Type1" }), - input: %q({ + input: '{ "@context": {"ex": "http://example.org/"}, "@id": "ex:Sub1", "@type": "ex:Type1", "ex:prop1": "Property 1", "ex:prop2": {"@id": "ex:Obj1"} - }), + }', output: %({ "@context": {"ex": "http://example.org/"}, 
"@graph": [{ @@ -439,19 +440,19 @@ }] }) }, - "explicitly includes unframed properties @explicit false": { + 'explicitly includes unframed properties @explicit false': { frame: %({ "@context": {"ex": "http://example.org/"}, "@explicit": false, "@type": "ex:Type1" }), - input: %q({ + input: '{ "@context": {"ex": "http://example.org/"}, "@id": "ex:Sub1", "@type": "ex:Type1", "ex:prop1": "Property 1", "ex:prop2": {"@id": "ex:Obj1"} - }), + }', output: %({ "@context": {"ex": "http://example.org/"}, "@graph": [{ @@ -462,7 +463,7 @@ }] }) }, - "explicitly excludes unframed properties (@explicit: true)": { + 'explicitly excludes unframed properties (@explicit: true)': { frame: %({ "@context": {"ex": "http://example.org/"}, "@explicit": true, @@ -483,7 +484,7 @@ }] }) }, - "non-existent framed properties create null property": { + 'non-existent framed properties create null property': { frame: %({ "@context": {"ex": "http://example.org/"}, "@type": "ex:Type1", @@ -511,7 +512,7 @@ }] }) }, - "non-existent framed properties create default property": { + 'non-existent framed properties create default property': { frame: %({ "@context": { "ex": "http://example.org/", @@ -541,7 +542,7 @@ }] }) }, - "default value for @type": { + 'default value for @type': { frame: %({ "@context": {"ex": "http://example.org/"}, "@type": {"@default": "ex:Foo"}, @@ -561,7 +562,7 @@ }] }) }, - "mixed content": { + 'mixed content': { frame: %({ "@context": {"ex": "http://example.org/"}, "ex:mixed": {"@embed": "@never"} @@ -585,7 +586,7 @@ }] }) }, - "no embedding (@embed: @never)": { + 'no embedding (@embed: @never)': { frame: %({ "@context": {"ex": "http://example.org/"}, "ex:embed": {"@embed": "@never"} @@ -606,7 +607,7 @@ }] }) }, - "first embed (@embed: @once)": { + 'first embed (@embed: @once)': { frame: %({ "@context": {"ex": "http://www.example.com/#"}, "@type": "ex:Thing", @@ -632,7 +633,7 @@ }), ordered: true }, - "always embed (@embed: @always)": { + 'always embed (@embed: @always)': { 
frame: %({ "@context": {"ex": "http://www.example.com/#"}, "@type": "ex:Thing", @@ -657,7 +658,7 @@ ] }) }, - "mixed list": { + 'mixed list': { frame: %({ "@context": {"ex": "http://example.org/"}, "ex:mixedlist": {} @@ -687,7 +688,7 @@ }] }) }, - "framed list": { + 'framed list': { frame: %({ "@context": { "ex": "http://example.org/", @@ -722,7 +723,7 @@ }] }) }, - "presentation example": { + 'presentation example': { frame: %({ "@context": { "primaryTopic": { @@ -769,7 +770,7 @@ }] }) }, - "microdata manifest": { + 'microdata manifest': { frame: %({ "@context": { "xsd": "http://www.w3.org/2001/XMLSchema#", @@ -792,7 +793,7 @@ } }] }), - input: %q({ + input: '{ "@context": { "md": "http://www.w3.org/ns/md#", "mf": "http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#", @@ -817,7 +818,7 @@ "mq:data": {"@id": "http://www.w3.org/TR/microdata-rdf/tests/0001.html"}, "mq:query": {"@id": "http://www.w3.org/TR/microdata-rdf/tests/0001.ttl"} }] - }), + }', output: %({ "@context": { "xsd": "http://www.w3.org/2001/XMLSchema#", @@ -848,7 +849,7 @@ }), processingMode: 'json-ld-1.1' }, - "library": { + library: { frame: %({ "@context": { "dc": "http://purl.org/dc/elements/1.1/", @@ -917,7 +918,7 @@ describe "@reverse" do { - "embed matched frames with @reverse": { + 'embed matched frames with @reverse': { frame: %({ "@context": {"ex": "http://example.org/"}, "@type": "ex:Type1", @@ -950,7 +951,7 @@ }] }) }, - "embed matched frames with reversed property": { + 'embed matched frames with reversed property': { frame: %({ "@context": { "ex": "http://example.org/", @@ -984,7 +985,7 @@ } }] }) - }, + } }.each do |title, params| it title do do_frame(params) @@ -994,7 +995,7 @@ context "omitGraph option" do { - "Defaults to false in 1.0": { + 'Defaults to false in 1.0': { input: %([{ "http://example.org/prop": [{"@value": "value"}], "http://example.org/foo": [{"@value": "bar"}] @@ -1015,7 +1016,7 @@ }), processingMode: "json-ld-1.0" }, - "Set with option in 1.0": { + 'Set with 
option in 1.0': { input: %([{ "http://example.org/prop": [{"@value": "value"}], "http://example.org/foo": [{"@value": "bar"}] @@ -1035,7 +1036,7 @@ processingMode: "json-ld-1.0", omitGraph: true }, - "Defaults to true in 1.1": { + 'Defaults to true in 1.1': { input: %([{ "http://example.org/prop": [{"@value": "value"}], "http://example.org/foo": [{"@value": "bar"}] @@ -1054,7 +1055,7 @@ }), processingMode: "json-ld-1.1" }, - "Set with option in 1.1": { + 'Set with option in 1.1': { input: %([{ "http://example.org/prop": [{"@value": "value"}], "http://example.org/foo": [{"@value": "bar"}] @@ -1075,15 +1076,15 @@ }), processingMode: "json-ld-1.1", omitGraph: false - }, + } }.each do |title, params| - it(title) {do_frame(params.merge(pruneBlankNodeIdentifiers: true))} + it(title) { do_frame(params.merge(pruneBlankNodeIdentifiers: true)) } end end context "@included" do { - "Basic Included array": { + 'Basic Included array': { input: %([{ "http://example.org/prop": [{"@value": "value"}], "http://example.org/foo": [{"@value": "bar"}] @@ -1120,7 +1121,7 @@ "prop": "value" }) }, - "Basic Included object": { + 'Basic Included object': { input: %([{ "http://example.org/prop": [{"@value": "value"}], "http://example.org/foo": [{"@value": "bar"}] @@ -1155,7 +1156,7 @@ } }) }, - "json.api example": { + 'json.api example': { input: %([{ "@id": "http://example.org/base/1", "@type": ["http://example.org/vocab#articles"], @@ -1272,16 +1273,16 @@ "author": "9", "self": "http://example.com/comments/12" }] - }), - }, + }) + } }.each do |title, params| - it(title) {do_frame(params.merge(processingMode: 'json-ld-1.1'))} + it(title) { do_frame(params.merge(processingMode: 'json-ld-1.1')) } end end describe "node pattern" do { - "matches a deep node pattern": { + 'matches a deep node pattern': { frame: %({ "@context": {"ex": "http://example.org/"}, "ex:p": { @@ -1320,7 +1321,7 @@ } }] }) - }, + } }.each do |title, params| it title do do_frame(params) @@ -1330,7 +1331,7 @@ describe "value 
pattern" do { - "matches exact values": { + 'matches exact values': { frame: %({ "@context": {"ex": "http://example.org/"}, "ex:p": "P", @@ -1354,7 +1355,7 @@ }] }) }, - "matches wildcard @value": { + 'matches wildcard @value': { frame: %({ "@context": {"ex": "http://example.org/"}, "ex:p": {"@value": {}}, @@ -1378,7 +1379,7 @@ }] }) }, - "matches wildcard @type": { + 'matches wildcard @type': { frame: %({ "@context": {"ex": "http://example.org/"}, "ex:q": {"@value": "Q", "@type": {}} @@ -1396,7 +1397,7 @@ }] }) }, - "matches wildcard @language": { + 'matches wildcard @language': { frame: %({ "@context": {"ex": "http://example.org/"}, "ex:r": {"@value": "R", "@language": {}} @@ -1414,7 +1415,7 @@ }] }) }, - "match none @type": { + 'match none @type': { frame: %({ "@context": {"ex": "http://example.org/"}, "ex:p": {"@value": {}, "@type": []}, @@ -1438,7 +1439,7 @@ }] }) }, - "match none @language": { + 'match none @language': { frame: %({ "@context": {"ex": "http://example.org/"}, "ex:p": {"@value": {}, "@language": []}, @@ -1462,7 +1463,7 @@ }] }) }, - "matches some @value": { + 'matches some @value': { frame: %({ "@context": {"ex": "http://example.org/"}, "ex:p": {"@value": ["P", "Q", "R"]}, @@ -1486,7 +1487,7 @@ }] }) }, - "matches some @type": { + 'matches some @type': { frame: %({ "@context": {"ex": "http://example.org/"}, "ex:q": {"@value": "Q", "@type": ["ex:q", "ex:Q"]} @@ -1504,7 +1505,7 @@ }] }) }, - "matches some @language": { + 'matches some @language': { frame: %({ "@context": {"ex": "http://example.org/"}, "ex:r": {"@value": "R", "@language": ["p", "q", "r"]} @@ -1522,7 +1523,7 @@ }] }) }, - "excludes non-matched values": { + 'excludes non-matched values': { frame: %({ "@context": {"ex": "http://example.org/"}, "ex:p": {"@value": {}}, @@ -1545,7 +1546,7 @@ "ex:r": {"@value": "R", "@language": "R"} }] }) - }, + } }.each do |title, params| it title do do_frame(params) @@ -1555,7 +1556,7 @@ describe "named graphs" do { - "Merge graphs if no outer @graph 
is used": { + 'Merge graphs if no outer @graph is used': { frame: %({ "@context": {"@vocab": "urn:"}, "@type": "Class" @@ -1579,7 +1580,7 @@ }), processingMode: 'json-ld-1.1' }, - "Frame default graph if outer @graph is used": { + 'Frame default graph if outer @graph is used': { frame: %({ "@context": {"@vocab": "urn:"}, "@type": "Class", @@ -1611,7 +1612,7 @@ }), processingMode: 'json-ld-1.1' }, - "Merge one graph and preserve another": { + 'Merge one graph and preserve another': { frame: %({ "@context": {"@vocab": "urn:"}, "@type": "Class", @@ -1656,7 +1657,7 @@ }), processingMode: 'json-ld-1.1' }, - "Merge one graph and deep preserve another": { + 'Merge one graph and deep preserve another': { frame: %({ "@context": {"@vocab": "urn:"}, "@type": "Class", @@ -1705,7 +1706,7 @@ }), processingMode: 'json-ld-1.1' }, - "library": { + library: { frame: %({ "@context": {"@vocab": "http://example.org/"}, "@type": "Library", @@ -1758,7 +1759,7 @@ }), processingMode: 'json-ld-1.1' }, - "named graph with @embed: @never": { + 'named graph with @embed: @never': { input: %({ "@id": "ex:cred", "ex:subject": { @@ -1815,7 +1816,7 @@ } }), processingMode: 'json-ld-1.1' - }, + } }.each do |title, params| it title do do_frame(params) @@ -1826,7 +1827,7 @@ describe "prune blank nodes" do { - "preserves single-use bnode identifiers if @version 1.0": { + 'preserves single-use bnode identifiers if @version 1.0': { frame: %({ "@context": { "dc": "http://purl.org/dc/terms/", @@ -1877,7 +1878,7 @@ }), processingMode: 'json-ld-1.0' }, - "preserves single-use bnode identifiers if pruneBlankNodeIdentifiers=false": { + 'preserves single-use bnode identifiers if pruneBlankNodeIdentifiers=false': { frame: %({ "@context": { "dc": "http://purl.org/dc/terms/", @@ -1928,7 +1929,7 @@ }), pruneBlankNodeIdentiers: false }, - "framing with @version: 1.1 prunes identifiers": { + 'framing with @version: 1.1 prunes identifiers': { frame: %({ "@context": { "@version": 1.1, @@ -1985,7 +1986,7 @@ context 
"problem cases" do { - "pr #20": { + 'pr #20': { frame: %({}), input: %([ { @@ -2019,7 +2020,7 @@ ] }) }, - "issue #28": { + 'issue #28': { frame: %({ "@context": { "rdfs": "http://www.w3.org/2000/01/rdf-schema#", @@ -2074,7 +2075,7 @@ ] }) }, - "PR #663 - Multiple named graphs": { + 'PR #663 - Multiple named graphs': { frame: %({ "@context": { "@vocab": "http://example.com/", @@ -2184,8 +2185,8 @@ }), processingMode: 'json-ld-1.1' }, - "w3c/json-ld-framing#5": { - frame: %({ + 'w3c/json-ld-framing#5': { + frame: %({ "@context" : { "@vocab" : "http://purl.bdrc.io/ontology/core/", "taxSubclassOf" : { @@ -2257,7 +2258,7 @@ }), processingMode: 'json-ld-1.1' }, - "issue json-ld-framing#30": { + 'issue json-ld-framing#30': { input: %({ "@context": {"eg": "https://example.org/ns/"}, "@id": "https://example.org/what", @@ -2277,7 +2278,7 @@ }] }) }, - "issue json-ld-framing#64": { + 'issue json-ld-framing#64': { input: %({ "@context": { "@version": 1.1, @@ -2301,7 +2302,7 @@ "Production": { "@context": { "part": { - "@type": "@id", + "@type": "@id", "@container": "@set" } } @@ -2316,7 +2317,7 @@ "Production": { "@context": { "part": { - "@type": "@id", + "@type": "@id", "@container": "@set" } } @@ -2335,7 +2336,7 @@ }), processingMode: "json-ld-1.1" }, - "issue json-ld-framing#27": { + 'issue json-ld-framing#27': { input: %({ "@id": "ex:cred", "ex:subject": { @@ -2395,7 +2396,7 @@ }), processingMode: "json-ld-1.1" }, - "missing types": { + 'missing types': { input: %({ "@context": { "ex": "http://example.com#", @@ -2463,7 +2464,7 @@ }), processingMode: "json-ld-1.1" }, - "issue #142": { + 'issue #142': { input: %({ "@context":{ "ex":"http://example.org/vocab#", @@ -2504,7 +2505,7 @@ "publisher": "JANE" } }), - processingMode: "json-ld-1.1" + processingMode: "json-ld-1.1" } }.each do |title, params| it title do @@ -2514,28 +2515,28 @@ end def do_frame(params) - begin - input, frame, output = params[:input], params[:frame], params[:output] - params = {processingMode: 
'json-ld-1.0'}.merge(params) - input = ::JSON.parse(input) if input.is_a?(String) - frame = ::JSON.parse(frame) if frame.is_a?(String) - output = ::JSON.parse(output) if output.is_a?(String) - jld = nil - if params[:write] - expect{jld = JSON::LD::API.frame(input, frame, logger: logger, **params)}.to write(params[:write]).to(:error) - else - expect{jld = JSON::LD::API.frame(input, frame, logger: logger, **params)}.not_to write.to(:error) - end - expect(jld).to produce_jsonld(output, logger) - - # Compare expanded jld/output too to make sure list values remain ordered - exp_jld = JSON::LD::API.expand(jld, processingMode: 'json-ld-1.1') - exp_output = JSON::LD::API.expand(output, processingMode: 'json-ld-1.1') - expect(exp_jld).to produce_jsonld(exp_output, logger) - rescue JSON::LD::JsonLdError => e - fail("#{e.class}: #{e.message}\n" + - "#{logger}\n" + - "Backtrace:\n#{e.backtrace.join("\n")}") + input = params[:input] + frame = params[:frame] + output = params[:output] + params = { processingMode: 'json-ld-1.0' }.merge(params) + input = JSON.parse(input) if input.is_a?(String) + frame = JSON.parse(frame) if frame.is_a?(String) + output = JSON.parse(output) if output.is_a?(String) + jld = nil + if params[:write] + expect { jld = JSON::LD::API.frame(input, frame, logger: logger, **params) }.to write(params[:write]).to(:error) + else + expect { jld = JSON::LD::API.frame(input, frame, logger: logger, **params) }.not_to write.to(:error) end + expect(jld).to produce_jsonld(output, logger) + + # Compare expanded jld/output too to make sure list values remain ordered + exp_jld = JSON::LD::API.expand(jld, processingMode: 'json-ld-1.1') + exp_output = JSON::LD::API.expand(output, processingMode: 'json-ld-1.1') + expect(exp_jld).to produce_jsonld(exp_output, logger) + rescue JSON::LD::JsonLdError => e + raise("#{e.class}: #{e.message}\n" \ + "#{logger}\n" \ + "Backtrace:\n#{e.backtrace.join("\n")}") end end diff --git a/spec/from_rdf_spec.rb b/spec/from_rdf_spec.rb index 
ca9148ea..f9a61ec8 100644 --- a/spec/from_rdf_spec.rb +++ b/spec/from_rdf_spec.rb @@ -1,148 +1,149 @@ -# coding: utf-8 +# frozen_string_literal: true + require_relative 'spec_helper' require 'rdf/spec/writer' describe JSON::LD::API do - let(:logger) {RDF::Spec.logger} + let(:logger) { RDF::Spec.logger } describe ".fromRdf" do context "simple tests" do it "One subject IRI object" do input = %( .) expect(serialize(input)).to produce_jsonld([ - { - '@id' => "http://a/b", - "http://a/c" => [{"@id" => "http://a/d"}] - } - ], logger) + { + '@id' => "http://a/b", + "http://a/c" => [{ "@id" => "http://a/d" }] + } + ], logger) end - it "should generate object list" do + it "generates object list" do input = %(@prefix : . :b :c :d, :e .) - expect(serialize(input)). - to produce_jsonld([{ - '@id' => "http://example.com/b", - "http://example.com/c" => [ - {"@id" => "http://example.com/d"}, - {"@id" => "http://example.com/e"} - ] - } - ], logger) + expect(serialize(input)) + .to produce_jsonld([{ + '@id' => "http://example.com/b", + "http://example.com/c" => [ + { "@id" => "http://example.com/d" }, + { "@id" => "http://example.com/e" } + ] + }], logger) end - - it "should generate property list" do + + it "generates property list" do input = %(@prefix : . :b :c :d; :e :f .) - expect(serialize(input)). - to produce_jsonld([{ - '@id' => "http://example.com/b", - "http://example.com/c" => [{"@id" => "http://example.com/d"}], - "http://example.com/e" => [{"@id" => "http://example.com/f"}] - } - ], logger) + expect(serialize(input)) + .to produce_jsonld([{ + '@id' => "http://example.com/b", + "http://example.com/c" => [{ "@id" => "http://example.com/d" }], + "http://example.com/e" => [{ "@id" => "http://example.com/f" }] + }], logger) end - + it "serializes multiple subjects" do - input = %q( + input = ' @prefix : . @prefix dc: . a :TestCase . a :TestCase . - ) - expect(serialize(input)). 
- to produce_jsonld([ - {'@id' => "test-cases/0001", '@type' => ["http://www.w3.org/2006/03/test-description#TestCase"]}, - {'@id' => "test-cases/0002", '@type' => ["http://www.w3.org/2006/03/test-description#TestCase"]}, - ], logger) + ' + expect(serialize(input)) + .to produce_jsonld([ + { '@id' => "test-cases/0001", + '@type' => ["http://www.w3.org/2006/03/test-description#TestCase"] }, + { '@id' => "test-cases/0002", '@type' => ["http://www.w3.org/2006/03/test-description#TestCase"] } + ], logger) end end - + context "literals" do context "coercion" do it "typed literal" do input = %(@prefix ex: . ex:a ex:b "foo"^^ex:d .) expect(serialize(input)).to produce_jsonld([ - { - '@id' => "http://example.com/a", - "http://example.com/b" => [{"@value" => "foo", "@type" => "http://example.com/d"}] - } - ], logger) + { + '@id' => "http://example.com/a", + "http://example.com/b" => [{ "@value" => "foo", + "@type" => "http://example.com/d" }] + } + ], logger) end it "integer" do input = %(@prefix ex: . ex:a ex:b 1 .) expect(serialize(input, useNativeTypes: true)).to produce_jsonld([{ - '@id' => "http://example.com/a", - "http://example.com/b" => [{"@value" => 1}] + '@id' => "http://example.com/a", + "http://example.com/b" => [{ "@value" => 1 }] }], logger) end it "integer (non-native)" do input = %(@prefix ex: . ex:a ex:b 1 .) expect(serialize(input, useNativeTypes: false)).to produce_jsonld([{ - '@id' => "http://example.com/a", - "http://example.com/b" => [{"@value" => "1","@type" => "http://www.w3.org/2001/XMLSchema#integer"}] + '@id' => "http://example.com/a", + "http://example.com/b" => [{ "@value" => "1", "@type" => "http://www.w3.org/2001/XMLSchema#integer" }] }], logger) end it "boolean" do input = %(@prefix ex: . ex:a ex:b true .) 
expect(serialize(input, useNativeTypes: true)).to produce_jsonld([{ - '@id' => "http://example.com/a", - "http://example.com/b" => [{"@value" => true}] + '@id' => "http://example.com/a", + "http://example.com/b" => [{ "@value" => true }] }], logger) end it "boolean (non-native)" do input = %(@prefix ex: . ex:a ex:b true .) expect(serialize(input, useNativeTypes: false)).to produce_jsonld([{ - '@id' => "http://example.com/a", - "http://example.com/b" => [{"@value" => "true","@type" => "http://www.w3.org/2001/XMLSchema#boolean"}] + '@id' => "http://example.com/a", + "http://example.com/b" => [{ "@value" => "true", "@type" => "http://www.w3.org/2001/XMLSchema#boolean" }] }], logger) end it "decmal" do input = %(@prefix ex: . ex:a ex:b 1.0 .) expect(serialize(input, useNativeTypes: true)).to produce_jsonld([{ - '@id' => "http://example.com/a", - "http://example.com/b" => [{"@value" => "1.0", "@type" => "http://www.w3.org/2001/XMLSchema#decimal"}] + '@id' => "http://example.com/a", + "http://example.com/b" => [{ "@value" => "1.0", "@type" => "http://www.w3.org/2001/XMLSchema#decimal" }] }], logger) end it "double" do input = %(@prefix ex: . ex:a ex:b 1.0e0 .) expect(serialize(input, useNativeTypes: true)).to produce_jsonld([{ - '@id' => "http://example.com/a", - "http://example.com/b" => [{"@value" => 1.0E0}] + '@id' => "http://example.com/a", + "http://example.com/b" => [{ "@value" => 1.0E0 }] }], logger) end it "double (non-native)" do input = %(@prefix ex: . ex:a ex:b 1.0e0 .) 
expect(serialize(input, useNativeTypes: false)).to produce_jsonld([{ - '@id' => "http://example.com/a", - "http://example.com/b" => [{"@value" => "1.0E0","@type" => "http://www.w3.org/2001/XMLSchema#double"}] + '@id' => "http://example.com/a", + "http://example.com/b" => [{ "@value" => "1.0E0", "@type" => "http://www.w3.org/2001/XMLSchema#double" }] }], logger) end end context "datatyped (non-native)" do { - integer: 1, - unsignedInteger: 1, + integer: 1, + unsignedInteger: 1, nonNegativeInteger: 1, - float: 1, + float: 1, nonPositiveInteger: -1, - negativeInteger: -1, + negativeInteger: -1 }.each do |t, v| - it "#{t}" do + it t.to_s do input = %( @prefix xsd: . @prefix ex: . ex:a ex:b "#{v}"^^xsd:#{t} . ) expect(serialize(input, useNativeTypes: false)).to produce_jsonld([{ - '@id' => "http://example.com/a", - "http://example.com/b" => [{"@value" => "#{v}","@type" => "http://www.w3.org/2001/XMLSchema##{t}"}] + '@id' => "http://example.com/a", + "http://example.com/b" => [{ "@value" => v.to_s, "@type" => "http://www.w3.org/2001/XMLSchema##{t}" }] }], logger) end end @@ -151,25 +152,25 @@ it "encodes language literal" do input = %(@prefix ex: . ex:a ex:b "foo"@en-us .) expect(serialize(input)).to produce_jsonld([{ - '@id' => "http://example.com/a", - "http://example.com/b" => [{"@value" => "foo", "@language" => "en-us"}] + '@id' => "http://example.com/a", + "http://example.com/b" => [{ "@value" => "foo", "@language" => "en-us" }] }], logger) end context "with @type: @json" do { - "true": { + true => { output: %([{ "@id": "http://example.org/vocab#id", "http://example.org/vocab#bool": [{"@value": true, "@type": "@json"}] }]), - input:%( + input: %( @prefix ex: . @prefix rdf: . ex:id ex:bool "true"^^rdf:JSON . ) }, - "false": { + false => { output: %([{ "@id": "http://example.org/vocab#id", "http://example.org/vocab#bool": [{"@value": false, "@type": "@json"}] @@ -180,7 +181,7 @@ ex:id ex:bool "false"^^rdf:JSON . 
) }, - "double": { + double: { output: %([{ "@id": "http://example.org/vocab#id", "http://example.org/vocab#double": [{"@value": 1.23E0, "@type": "@json"}] @@ -191,7 +192,7 @@ ex:id ex:double "1.23E0"^^rdf:JSON . ) }, - "double-zero": { + 'double-zero': { output: %([{ "@id": "http://example.org/vocab#id", "http://example.org/vocab#double": [{"@value": 0, "@type": "@json"}] @@ -202,7 +203,7 @@ ex:id ex:double "0.0E0"^^rdf:JSON . ) }, - "integer": { + integer: { output: %([{ "@id": "http://example.org/vocab#id", "http://example.org/vocab#integer": [{"@value": 123, "@type": "@json"}] @@ -213,7 +214,7 @@ ex:id ex:integer "123"^^rdf:JSON . ) }, - "string": { + string: { output: %([{ "@id": "http://example.org/vocab#id", "http://example.org/vocab#string": [{ @@ -227,7 +228,7 @@ ex:id ex:string "\\"string\\""^^rdf:JSON . ) }, - "null": { + null: { output: %([{ "@id": "http://example.org/vocab#id", "http://example.org/vocab#null": [{ @@ -241,7 +242,7 @@ ex:id ex:null "null"^^rdf:JSON . ) }, - "object": { + object: { output: %([{ "@id": "http://example.org/vocab#id", "http://example.org/vocab#object": [{"@value": {"foo": "bar"}, "@type": "@json"}] @@ -252,7 +253,7 @@ ex:id ex:object """{"foo":"bar"}"""^^rdf:JSON . ) }, - "array": { + array: { output: %([{ "@id": "http://example.org/vocab#id", "http://example.org/vocab#array": [{"@value": [{"foo": "bar"}], "@type": "@json"}] @@ -262,223 +263,224 @@ @prefix rdf: . ex:id ex:array """[{"foo":"bar"}]"""^^rdf:JSON . 
) - }, + } }.each do |title, params| params[:input] = RDF::Graph.new << RDF::Turtle::Reader.new(params[:input]) - it(title) {do_fromRdf(processingMode: "json-ld-1.1", **params)} + it(title) { do_fromRdf(processingMode: "json-ld-1.1", **params) } end end context "extendedRepresentation: true" do { - "true": { + true => { output: [{ - "@id" => "http://example.org/vocab#id", - "http://example.org/vocab#bool" => [{"@value" => RDF::Literal(true)}] - }], - input:%( + "@id" => "http://example.org/vocab#id", + "http://example.org/vocab#bool" => [{ "@value" => RDF::Literal(true) }] + }], + input: %( @prefix ex: . @prefix rdf: . ex:id ex:bool true . ) }, - "false": { + false => { output: [{ - "@id" => "http://example.org/vocab#id", - "http://example.org/vocab#bool" => [{"@value" => RDF::Literal(false)}] - }], + "@id" => "http://example.org/vocab#id", + "http://example.org/vocab#bool" => [{ "@value" => RDF::Literal(false) }] + }], input: %( @prefix ex: . @prefix rdf: . ex:id ex:bool false . ) }, - "double": { + double: { output: [{ - "@id" => "http://example.org/vocab#id", - "http://example.org/vocab#double" => [{"@value" => RDF::Literal(1.23E0)}] - }], + "@id" => "http://example.org/vocab#id", + "http://example.org/vocab#double" => [{ "@value" => RDF::Literal(1.23E0) }] + }], input: %( @prefix ex: . @prefix rdf: . ex:id ex:double 1.23E0 . ) }, - "double-zero": { + 'double-zero': { output: [{ - "@id" => "http://example.org/vocab#id", - "http://example.org/vocab#double" => [{"@value" => RDF::Literal(0, datatype: RDF::XSD.double)}] - }], + "@id" => "http://example.org/vocab#id", + "http://example.org/vocab#double" => [{ "@value" => RDF::Literal(0, datatype: RDF::XSD.double) }] + }], input: %( @prefix ex: . @prefix rdf: . ex:id ex:double 0.0E0 . 
) }, - "integer": { + integer: { output: [{ - "@id" => "http://example.org/vocab#id", - "http://example.org/vocab#integer" => [{"@value" => RDF::Literal(123)}] - }], + "@id" => "http://example.org/vocab#id", + "http://example.org/vocab#integer" => [{ "@value" => RDF::Literal(123) }] + }], input: %( @prefix ex: . @prefix rdf: . ex:id ex:integer 123 . ) - }, + } }.each do |title, params| params[:input] = RDF::Graph.new << RDF::Turtle::Reader.new(params[:input]) it(title) { do_fromRdf(processingMode: "json-ld-1.1", - useNativeTypes: true, - extendedRepresentation: true, - **params)} + useNativeTypes: true, + extendedRepresentation: true, + **params) + } end end end context "anons" do - it "should generate bare anon" do + it "generates bare anon" do input = %(@prefix : . _:a :a :b .) expect(serialize(input)).to produce_jsonld([ - { - "@id" => "_:a", - "http://example.com/a" => [{"@id" => "http://example.com/b"}] - } - ], logger) + { + "@id" => "_:a", + "http://example.com/a" => [{ "@id" => "http://example.com/b" }] + } + ], logger) end - - it "should generate anon as object" do + + it "generates anon as object" do input = %(@prefix : . :a :b _:a . _:a :c :d .) expect(serialize(input)).to produce_jsonld([ - { - "@id" => "_:a", - "http://example.com/c" => [{"@id" => "http://example.com/d"}] - }, - { - "@id" => "http://example.com/a", - "http://example.com/b" => [{"@id" => "_:a"}] - } - ], logger) + { + "@id" => "_:a", + "http://example.com/c" => [{ "@id" => "http://example.com/d" }] + }, + { + "@id" => "http://example.com/a", + "http://example.com/b" => [{ "@id" => "_:a" }] + } + ], logger) end end context "lists" do { "literal list" => { - input: %q( + input: ' @prefix : . @prefix rdf: . :a :b ("apple" "banana") . 
- ), + ', output: [{ - '@id' => "http://example.com/a", - "http://example.com/b" => [{ + '@id' => "http://example.com/a", + "http://example.com/b" => [{ "@list" => [ - {"@value" => "apple"}, - {"@value" => "banana"} + { "@value" => "apple" }, + { "@value" => "banana" } ] }] }] }, "iri list" => { - input: %q(@prefix : . :a :b (:c) .), + input: '@prefix : . :a :b (:c) .', output: [{ - '@id' => "http://example.com/a", - "http://example.com/b" => [{ + '@id' => "http://example.com/a", + "http://example.com/b" => [{ "@list" => [ - {"@id" => "http://example.com/c"} + { "@id" => "http://example.com/c" } ] }] }] }, "empty list" => { - input: %q(@prefix : . :a :b () .), + input: '@prefix : . :a :b () .', output: [{ - '@id' => "http://example.com/a", - "http://example.com/b" => [{"@list" => []}] + '@id' => "http://example.com/a", + "http://example.com/b" => [{ "@list" => [] }] }] }, "single element list" => { - input: %q(@prefix : . :a :b ( "apple" ) .), + input: '@prefix : . :a :b ( "apple" ) .', output: [{ - '@id' => "http://example.com/a", - "http://example.com/b" => [{"@list" => [{"@value" => "apple"}]}] + '@id' => "http://example.com/a", + "http://example.com/b" => [{ "@list" => [{ "@value" => "apple" }] }] }] }, "single element list without @type" => { - input: %q(@prefix : . :a :b ( _:a ) . _:a :b "foo" .), + input: '@prefix : . :a :b ( _:a ) . _:a :b "foo" .', output: [ { - '@id' => "_:a", - "http://example.com/b" => [{"@value" => "foo"}] + '@id' => "_:a", + "http://example.com/b" => [{ "@value" => "foo" }] }, { - '@id' => "http://example.com/a", - "http://example.com/b" => [{"@list" => [{"@id" => "_:a"}]}] - }, + '@id' => "http://example.com/a", + "http://example.com/b" => [{ "@list" => [{ "@id" => "_:a" }] }] + } ] }, "multiple graphs with shared BNode" => { - input: %q( + input: ' _:z0 . _:z0 "cell-A" . _:z0 _:z1 . _:z1 "cell-B" . _:z1 . _:z1 . 
- ), + ', output: [{ "@id" => "http://www.example.com/G", "@graph" => [{ "@id" => "_:z0", - "http://www.w3.org/1999/02/22-rdf-syntax-ns#first" => [{"@value" => "cell-A"}], - "http://www.w3.org/1999/02/22-rdf-syntax-ns#rest" => [{"@id" => "_:z1"}] + "http://www.w3.org/1999/02/22-rdf-syntax-ns#first" => [{ "@value" => "cell-A" }], + "http://www.w3.org/1999/02/22-rdf-syntax-ns#rest" => [{ "@id" => "_:z1" }] }, { "@id" => "_:z1", - "http://www.w3.org/1999/02/22-rdf-syntax-ns#first" => [{"@value" => "cell-B"}], - "http://www.w3.org/1999/02/22-rdf-syntax-ns#rest" => [{"@list" => []}] + "http://www.w3.org/1999/02/22-rdf-syntax-ns#first" => [{ "@value" => "cell-B" }], + "http://www.w3.org/1999/02/22-rdf-syntax-ns#rest" => [{ "@list" => [] }] }, { "@id" => "http://www.example.com/z", - "http://www.example.com/q" => [{"@id" => "_:z0"}] + "http://www.example.com/q" => [{ "@id" => "_:z0" }] }] }, - { - "@id" => "http://www.example.com/G1", - "@graph" => [{ - "@id" => "http://www.example.com/x", - "http://www.example.com/p" => [{"@id" => "_:z1"}] - }] - }], + { + "@id" => "http://www.example.com/G1", + "@graph" => [{ + "@id" => "http://www.example.com/x", + "http://www.example.com/p" => [{ "@id" => "_:z1" }] + }] + }], reader: RDF::NQuads::Reader }, "multiple graphs with shared BNode (at head)" => { - input: %q( + input: ' _:z0 . _:z0 "cell-A" . _:z0 _:z1 . _:z1 "cell-B" . _:z1 . _:z0 . 
- ), + ', output: [{ "@id" => "http://www.example.com/G", "@graph" => [{ "@id" => "_:z0", - "http://www.w3.org/1999/02/22-rdf-syntax-ns#first" => [{"@value" => "cell-A"}], - "http://www.w3.org/1999/02/22-rdf-syntax-ns#rest" => [{"@list" => [{ "@value" => "cell-B" }]}] + "http://www.w3.org/1999/02/22-rdf-syntax-ns#first" => [{ "@value" => "cell-A" }], + "http://www.w3.org/1999/02/22-rdf-syntax-ns#rest" => [{ "@list" => [{ "@value" => "cell-B" }] }] }, { "@id" => "http://www.example.com/z", - "http://www.example.com/q" => [{"@id" => "_:z0"}] + "http://www.example.com/q" => [{ "@id" => "_:z0" }] }] }, - { - "@id" => "http://www.example.com/G1", - "@graph" => [{ - "@id" => "http://www.example.com/z", - "http://www.example.com/q" => [{"@id" => "_:z0"}] - }] - }], + { + "@id" => "http://www.example.com/G1", + "@graph" => [{ + "@id" => "http://www.example.com/z", + "http://www.example.com/q" => [{ "@id" => "_:z0" }] + }] + }], reader: RDF::NQuads::Reader }, "@list containing empty @list" => { @@ -552,12 +554,12 @@ reader: RDF::NQuads::Reader } }.each do |name, params| - it "#{name}" do + it name.to_s do do_fromRdf(params) end end end - + context "quads" do { "simple named graph" => { @@ -569,9 +571,9 @@ "@id" => "http://example.com/U", "@graph" => [{ "@id" => "http://example.com/a", - "http://example.com/b" => [{"@id" => "http://example.com/c"}] + "http://example.com/b" => [{ "@id" => "http://example.com/c" }] }] - }, + } ] }, "with properties" => { @@ -584,9 +586,9 @@ "@id" => "http://example.com/U", "@graph" => [{ "@id" => "http://example.com/a", - "http://example.com/b" => [{"@id" => "http://example.com/c"}] + "http://example.com/b" => [{ "@id" => "http://example.com/c" }] }], - "http://example.com/d" => [{"@id" => "http://example.com/e"}] + "http://example.com/d" => [{ "@id" => "http://example.com/e" }] } ] }, @@ -604,9 +606,9 @@ "@id" => "http://example.com/U", "@graph" => [{ "@id" => "http://example.com/a", - "http://example.com/b" => [{"@list" => [{"@id" => 
"http://example.com/c"}]}] + "http://example.com/b" => [{ "@list" => [{ "@id" => "http://example.com/c" }] }] }], - "http://example.com/d" => [{"@list" => [{"@id" => "http://example.com/e"}]}] + "http://example.com/d" => [{ "@list" => [{ "@id" => "http://example.com/e" }] }] } ] }, @@ -626,7 +628,7 @@ { "@id" => "http://example.com/a", "http://example.com/b" => [{ - "@list" => [{"@id" => "http://example.com/c"}] + "@list" => [{ "@id" => "http://example.com/c" }] }] } ] @@ -637,15 +639,15 @@ { "@id" => "http://example.com/a", "http://example.com/b" => [{ - "@list" => [{"@id" => "http://example.com/e"}] + "@list" => [{ "@id" => "http://example.com/e" }] }] } ] } ] - }, + } }.each_pair do |name, params| - it "#{name}" do + it name.to_s do do_fromRdf(params.merge(reader: RDF::NQuads::Reader)) end end @@ -654,51 +656,51 @@ context "@direction" do context "rdfDirection: null" do { - "no language rtl datatype": { - input: %q( + 'no language rtl datatype': { + input: ' "no language"^^ . - ), - output: %q([{ + ', + output: '[{ "@id": "http://example.com/a", "http://example.org/label": [{"@value": "no language", "@type": "https://www.w3.org/ns/i18n#_rtl"}] - }]), + }]' }, - "no language rtl compound-literal": { - input: %q( + 'no language rtl compound-literal': { + input: ' @prefix rdf: . _:cl1 . _:cl1 rdf:value "no language"; rdf:direction "rtl" . - ), - output: %q([{ + ', + output: '[{ "@id": "http://example.com/a", "http://example.org/label": [{"@id": "_:cl1"}] }, { "@id": "_:cl1", "http://www.w3.org/1999/02/22-rdf-syntax-ns#value": [{"@value": "no language"}], "http://www.w3.org/1999/02/22-rdf-syntax-ns#direction": [{"@value": "rtl"}] - }]), + }]' }, - "en-US rtl datatype": { - input: %q( + 'en-US rtl datatype': { + input: ' "en-US"^^ . 
- ), - output: %q([{ + ', + output: '[{ "@id": "http://example.com/a", "http://example.org/label": [{"@value": "en-US", "@type": "https://www.w3.org/ns/i18n#en-us_rtl"}] - }]), + }]' }, - "en-US rtl compound-literal": { - input: %q( + 'en-US rtl compound-literal': { + input: ' @prefix rdf: . _:cl1 . _:cl1 rdf:value "en-US"; rdf:language "en-us"; rdf:direction "rtl" . - ), - output: %q([{ + ', + output: '[{ "@id": "http://example.com/a", "http://example.org/label": [{"@id": "_:cl1"}] }, { @@ -706,7 +708,7 @@ "http://www.w3.org/1999/02/22-rdf-syntax-ns#value": [{"@value": "en-US"}], "http://www.w3.org/1999/02/22-rdf-syntax-ns#language": [{"@value": "en-us"}], "http://www.w3.org/1999/02/22-rdf-syntax-ns#direction": [{"@value": "rtl"}] - }]), + }]' } }.each_pair do |name, params| it name do @@ -717,51 +719,51 @@ context "rdfDirection: i18n-datatype" do { - "no language rtl datatype": { - input: %q( + 'no language rtl datatype': { + input: ' "no language"^^ . - ), - output: %q([{ + ', + output: '[{ "@id": "http://example.com/a", "http://example.org/label": [{"@value": "no language", "@direction": "rtl"}] - }]), + }]' }, - "no language rtl compound-literal": { - input: %q( + 'no language rtl compound-literal': { + input: ' @prefix rdf: . _:cl1 . _:cl1 rdf:value "no language"; rdf:direction "rtl" . - ), - output: %q([{ + ', + output: '[{ "@id": "http://example.com/a", "http://example.org/label": [{"@id": "_:cl1"}] }, { "@id": "_:cl1", "http://www.w3.org/1999/02/22-rdf-syntax-ns#value": [{"@value": "no language"}], "http://www.w3.org/1999/02/22-rdf-syntax-ns#direction": [{"@value": "rtl"}] - }]), + }]' }, - "en-US rtl datatype": { - input: %q( + 'en-US rtl datatype': { + input: ' "en-US"^^ . - ), - output: %q([{ + ', + output: '[{ "@id": "http://example.com/a", "http://example.org/label": [{"@value": "en-US", "@language": "en-US", "@direction": "rtl"}] - }]), + }]' }, - "en-US rtl compound-literal": { - input: %q( + 'en-US rtl compound-literal': { + input: ' @prefix rdf: . 
_:cl1 . _:cl1 rdf:value "en-US"; rdf:language "en-US"; rdf:direction "rtl" . - ), - output: %q([{ + ', + output: '[{ "@id": "http://example.com/a", "http://example.org/label": [{"@id": "_:cl1"}] }, { @@ -769,65 +771,67 @@ "http://www.w3.org/1999/02/22-rdf-syntax-ns#value": [{"@value": "en-US"}], "http://www.w3.org/1999/02/22-rdf-syntax-ns#language": [{"@value": "en-US"}], "http://www.w3.org/1999/02/22-rdf-syntax-ns#direction": [{"@value": "rtl"}] - }]), + }]' } }.each_pair do |name, params| it name do - do_fromRdf(params.merge(reader: RDF::Turtle::Reader, rdfDirection: 'i18n-datatype', processingMode: 'json-ld-1.1')) + do_fromRdf(params.merge(reader: RDF::Turtle::Reader, rdfDirection: 'i18n-datatype', + processingMode: 'json-ld-1.1')) end end end context "rdfDirection: compound-literal" do { - "no language rtl datatype": { - input: %q( + 'no language rtl datatype': { + input: ' "no language"^^ . - ), - output: %q([{ + ', + output: '[{ "@id": "http://example.com/a", "http://example.org/label": [{"@value": "no language", "@type": "https://www.w3.org/ns/i18n#_rtl"}] - }]), + }]' }, - "no language rtl compound-literal": { - input: %q( + 'no language rtl compound-literal': { + input: ' @prefix rdf: . _:cl1 . _:cl1 rdf:value "no language"; rdf:direction "rtl" . - ), - output: %q([{ + ', + output: '[{ "@id": "http://example.com/a", "http://example.org/label": [{"@value": "no language", "@direction": "rtl"}] - }]), + }]' }, - "en-US rtl datatype": { - input: %q( + 'en-US rtl datatype': { + input: ' "en-US"^^ . - ), - output: %q([{ + ', + output: '[{ "@id": "http://example.com/a", "http://example.org/label": [{"@value": "en-US", "@type": "https://www.w3.org/ns/i18n#en-us_rtl"}] - }]), + }]' }, - "en-US rtl compound-literal": { - input: %q( + 'en-US rtl compound-literal': { + input: ' @prefix rdf: . _:cl1 . _:cl1 rdf:value "en-US"; rdf:language "en-us"; rdf:direction "rtl" . 
- ), - output: %q([{ + ', + output: '[{ "@id": "http://example.com/a", "http://example.org/label": [{"@value": "en-US", "@language": "en-us", "@direction": "rtl"}] - }]), + }]' } }.each_pair do |name, params| it name do - do_fromRdf(params.merge(reader: RDF::Turtle::Reader, rdfDirection: 'compound-literal', processingMode: 'json-ld-1.1')) + do_fromRdf(params.merge(reader: RDF::Turtle::Reader, rdfDirection: 'compound-literal', + processingMode: 'json-ld-1.1')) end end end @@ -835,14 +839,16 @@ context "RDF-star" do { - "subject-iii": { + 'subject-iii': { input: RDF::Statement( RDF::Statement( RDF::URI('http://example/s1'), RDF::URI('http://example/p1'), - RDF::URI('http://example/o1')), + RDF::URI('http://example/o1') + ), RDF::URI('http://example/p'), - RDF::URI('http://example/o')), + RDF::URI('http://example/o') + ), output: %([{ "@id": { "@id": "http://example/s1", @@ -851,14 +857,16 @@ "http://example/p": [{"@id": "http://example/o"}] }]) }, - "subject-iib": { + 'subject-iib': { input: RDF::Statement( RDF::Statement( RDF::URI('http://example/s1'), RDF::URI('http://example/p1'), - RDF::Node.new('o1')), + RDF::Node.new('o1') + ), RDF::URI('http://example/p'), - RDF::URI('http://example/o')), + RDF::URI('http://example/o') + ), output: %([{ "@id": { "@id": "http://example/s1", @@ -867,14 +875,16 @@ "http://example/p": [{"@id": "http://example/o"}] }]) }, - "subject-iil": { + 'subject-iil': { input: RDF::Statement( RDF::Statement( RDF::URI('http://example/s1'), RDF::URI('http://example/p1'), - RDF::Literal('o1')), + RDF::Literal('o1') + ), RDF::URI('http://example/p'), - RDF::URI('http://example/o')), + RDF::URI('http://example/o') + ), output: %([{ "@id": { "@id": "http://example/s1", @@ -883,14 +893,16 @@ "http://example/p": [{"@id": "http://example/o"}] }]) }, - "subject-bii": { + 'subject-bii': { input: RDF::Statement( RDF::Statement( RDF::Node('s1'), RDF::URI('http://example/p1'), - RDF::URI('http://example/o1')), + RDF::URI('http://example/o1') + ), 
RDF::URI('http://example/p'), - RDF::URI('http://example/o')), + RDF::URI('http://example/o') + ), output: %([{ "@id": { "@id": "_:s1", @@ -899,13 +911,15 @@ "http://example/p": [{"@id": "http://example/o"}] }]) }, - "subject-bib": { + 'subject-bib': { input: RDF::Statement( RDF::Statement( RDF::Node('s1'), RDF::URI('http://example/p1'), - RDF::Node.new('o1')), - RDF::URI('http://example/p'), RDF::URI('http://example/o')), + RDF::Node.new('o1') + ), + RDF::URI('http://example/p'), RDF::URI('http://example/o') + ), output: %([{ "@id": { "@id": "_:s1", @@ -914,14 +928,16 @@ "http://example/p": [{"@id": "http://example/o"}] }]) }, - "subject-bil": { + 'subject-bil': { input: RDF::Statement( RDF::Statement( RDF::Node('s1'), RDF::URI('http://example/p1'), - RDF::Literal('o1')), + RDF::Literal('o1') + ), RDF::URI('http://example/p'), - RDF::URI('http://example/o')), + RDF::URI('http://example/o') + ), output: %([{ "@id": { "@id": "_:s1", @@ -930,14 +946,16 @@ "http://example/p": [{"@id": "http://example/o"}] }]) }, - "object-iii": { + 'object-iii': { input: RDF::Statement( RDF::URI('http://example/s'), RDF::URI('http://example/p'), RDF::Statement( RDF::URI('http://example/s1'), RDF::URI('http://example/p1'), - RDF::URI('http://example/o1'))), + RDF::URI('http://example/o1') + ) + ), output: %([{ "@id": "http://example/s", "http://example/p": [{ @@ -948,14 +966,16 @@ }] }]) }, - "object-iib": { + 'object-iib': { input: RDF::Statement( RDF::URI('http://example/s'), RDF::URI('http://example/p'), RDF::Statement( RDF::URI('http://example/s1'), RDF::URI('http://example/p1'), - RDF::Node.new('o1'))), + RDF::Node.new('o1') + ) + ), output: %([{ "@id": "http://example/s", "http://example/p": [{ @@ -966,14 +986,16 @@ }] }]) }, - "object-iil": { + 'object-iil': { input: RDF::Statement( RDF::URI('http://example/s'), RDF::URI('http://example/p'), RDF::Statement( RDF::URI('http://example/s1'), RDF::URI('http://example/p1'), - RDF::Literal('o1'))), + RDF::Literal('o1') + ) + ), output: 
%([{ "@id": "http://example/s", "http://example/p": [{ @@ -984,17 +1006,20 @@ }] }]) }, - "recursive-subject": { + 'recursive-subject': { input: RDF::Statement( RDF::Statement( RDF::Statement( RDF::URI('http://example/s2'), RDF::URI('http://example/p2'), - RDF::URI('http://example/o2')), + RDF::URI('http://example/o2') + ), RDF::URI('http://example/p1'), - RDF::URI('http://example/o1')), + RDF::URI('http://example/o1') + ), RDF::URI('http://example/p'), - RDF::URI('http://example/o')), + RDF::URI('http://example/o') + ), output: %([{ "@id": { "@id": { @@ -1005,11 +1030,11 @@ }, "http://example/p": [{"@id": "http://example/o"}] }]) - }, + } }.each do |name, params| it name do - graph = RDF::Graph.new {|g| g << params[:input]} - do_fromRdf(params.merge(input: graph, prefixes: {ex: 'http://example/'})) + graph = RDF::Graph.new { |g| g << params[:input] } + do_fromRdf(params.merge(input: graph, prefixes: { ex: 'http://example/' })) end end end @@ -1029,9 +1054,9 @@ { "@id" => "http://www.w3.org/2001/XMLSchema#boolean" } ] }] - }, + } }.each do |t, params| - it "#{t}" do + it t.to_s do do_fromRdf(params) end end @@ -1053,20 +1078,19 @@ def serialize(ntstr, **options) end def do_fromRdf(params) - begin - input, output = params[:input], params[:output] - output = ::JSON.parse(output) if output.is_a?(String) - jld = nil - if params[:write] - expect{jld = serialize(input, **params)}.to write(params[:write]).to(:error) - else - expect{jld = serialize(input, **params)}.not_to write.to(:error) - end - expect(jld).to produce_jsonld(output, logger) - rescue JSON::LD::JsonLdError => e - fail("#{e.class}: #{e.message}\n" + - "#{logger}\n" + - "Backtrace:\n#{e.backtrace.join("\n")}") + input = params[:input] + output = params[:output] + output = JSON.parse(output) if output.is_a?(String) + jld = nil + if params[:write] + expect { jld = serialize(input, **params) }.to write(params[:write]).to(:error) + else + expect { jld = serialize(input, **params) }.not_to write.to(:error) end + 
expect(jld).to produce_jsonld(output, logger) + rescue JSON::LD::JsonLdError => e + raise("#{e.class}: #{e.message}\n" \ + "#{logger}\n" \ + "Backtrace:\n#{e.backtrace.join("\n")}") end end diff --git a/spec/matchers.rb b/spec/matchers.rb index e5e9c35a..76cc71c2 100644 --- a/spec/matchers.rb +++ b/spec/matchers.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require 'rspec/matchers' # @see https://rubygems.org/gems/rspec require_relative 'support/extensions' @@ -7,14 +9,26 @@ end failure_message do |actual| - "Expected: #{expected.is_a?(String) ? expected : expected.to_json(JSON_STATE) rescue 'malformed json'}\n" + - "Actual : #{actual.is_a?(String) ? actual : actual.to_json(JSON_STATE) rescue 'malformed json'}\n" + - "\nDebug:\n#{logger}" + "Expected: #{begin + expected.is_a?(String) ? expected : expected.to_json(JSON_STATE) + rescue StandardError + 'malformed json' + end}\n" \ + "Actual : #{begin + actual.is_a?(String) ? actual : actual.to_json(JSON_STATE) + rescue StandardError + 'malformed json' + end}\n" \ + "\nDebug:\n#{logger}" end failure_message_when_negated do |actual| - "Expected not to produce the following:\n" + - "Actual : #{actual.is_a?(String) ? actual : actual.to_json(JSON_STATE) rescue 'malformed json'}\n" + - "\nDebug:\n#{logger}" + "Expected not to produce the following:\n" \ + "Actual : #{begin + actual.is_a?(String) ? 
actual : actual.to_json(JSON_STATE) + rescue StandardError + 'malformed json' + end}\n" \ + "\nDebug:\n#{logger}" end end diff --git a/spec/rdfstar_spec.rb b/spec/rdfstar_spec.rb index cb9ea7af..08811d9e 100644 --- a/spec/rdfstar_spec.rb +++ b/spec/rdfstar_spec.rb @@ -1,25 +1,28 @@ -# coding: utf-8 +# frozen_string_literal: true + require_relative 'spec_helper' -describe JSON::LD do - describe "test suite" do - require_relative 'suite_helper' - %w{ - expand - compact - flatten - fromRdf - toRdf - }.each do |partial| - m = Fixtures::SuiteTest::Manifest.open("#{Fixtures::SuiteTest::STAR_SUITE}#{partial}-manifest.jsonld") - describe m.name do - m.entries.each do |t| - specify "#{t.property('@id')}: #{t.name}#{' (negative test)' unless t.positiveTest?}" do - t.options[:ordered] = false - expect {t.run self}.not_to write.to(:error) +unless ENV['CI'] + describe JSON::LD do + describe "test suite" do + require_relative 'suite_helper' + %w[ + expand + compact + flatten + fromRdf + toRdf + ].each do |partial| + m = Fixtures::SuiteTest::Manifest.open("#{Fixtures::SuiteTest::STAR_SUITE}#{partial}-manifest.jsonld") + describe m.name do + m.entries.each do |t| + specify "#{t.property('@id')}: #{t.name}#{' (negative test)' unless t.positiveTest?}" do + t.options[:ordered] = false + expect { t.run self }.not_to write.to(:error) + end end end end end end -end unless ENV['CI'] \ No newline at end of file +end diff --git a/spec/reader_spec.rb b/spec/reader_spec.rb index d2832b05..8c1005a3 100644 --- a/spec/reader_spec.rb +++ b/spec/reader_spec.rb @@ -1,32 +1,33 @@ -# coding: utf-8 +# frozen_string_literal: true + require_relative 'spec_helper' require 'rdf/spec/reader' describe JSON::LD::Reader do - let!(:doap) {File.expand_path("../../etc/doap.jsonld", __FILE__)} - let!(:doap_nt) {File.expand_path("../../etc/doap.nt", __FILE__)} - let!(:doap_count) {File.open(doap_nt).each_line.to_a.length} - let(:logger) {RDF::Spec.logger} + let!(:doap) { File.expand_path('../etc/doap.jsonld', 
__dir__) } + let!(:doap_nt) { File.expand_path('../etc/doap.nt', __dir__) } + let!(:doap_count) { File.open(doap_nt).each_line.to_a.length } + let(:logger) { RDF::Spec.logger } - after(:each) {|example| puts logger.to_s if example.exception} + after { |example| puts logger if example.exception } it_behaves_like 'an RDF::Reader' do - let(:reader_input) {File.read(doap)} - let(:reader) {JSON::LD::Reader.new(reader_input)} - let(:reader_count) {doap_count} + let(:reader_input) { File.read(doap) } + let(:reader) { JSON::LD::Reader.new(reader_input) } + let(:reader_count) { doap_count } end describe ".for" do - formats = [ + [ :jsonld, "etc/doap.jsonld", - {file_name: 'etc/doap.jsonld'}, - {file_extension: 'jsonld'}, - {content_type: 'application/ld+json'}, - {content_type: 'application/x-ld+json'}, + { file_name: 'etc/doap.jsonld' }, + { file_extension: 'jsonld' }, + { content_type: 'application/ld+json' }, + { content_type: 'application/x-ld+json' } ].each do |arg| it "discovers with #{arg.inspect}" do - expect(RDF::Reader.for(arg)).to eq JSON::LD::Reader + expect(RDF::Reader.for(arg)).to eq described_class end end end @@ -41,14 +42,14 @@ context :interface do { - plain: %q({ + plain: '{ "@context": {"foaf": "http://xmlns.com/foaf/0.1/"}, "@id": "_:bnode1", "@type": "foaf:Person", "foaf:homepage": "http://example.com/bob/", "foaf:name": "Bob" - }), - leading_comment: %q( + }', + leading_comment: ' // A comment before content { "@context": {"foaf": "http://xmlns.com/foaf/0.1/"}, @@ -56,8 +57,8 @@ "@type": "foaf:Person", "foaf:homepage": "http://example.com/bob/", "foaf:name": "Bob" - }), - script: %q(), - script_comments: %q(', + script_comments: '), + ' }.each do |variant, src| context variant do - subject {src} + subject { src } describe "#initialize" do it "yields reader given string" do @@ -127,7 +128,8 @@ describe "Base IRI resolution" do # From https://gist.github.com/RubenVerborgh/39f0e8d63e33e435371a - let(:json) {%q{[ + let(:json) do + '[ { "@context": 
{"@base": "http://a/bb/ccc/d;p?q", "urn:ex:p": {"@type": "@id"}}, "@graph": [ @@ -519,8 +521,10 @@ {"@id": "urn:ex:s306", "urn:ex:p": "../xyz"} ] } - ]}} - let(:nt) {%q{ + ]' + end + let(:nt) do + ' # RFC3986 normal examples . @@ -873,10 +877,12 @@ . . . - }} + ' + end + it "produces equivalent triples" do nt_str = RDF::NTriples::Reader.new(nt).dump(:ntriples) - json_str = JSON::LD::Reader.new(json).dump(:ntriples) + json_str = described_class.new(json).dump(:ntriples) expect(json_str).to eql(nt_str) end end diff --git a/spec/resource_spec.rb b/spec/resource_spec.rb index 5e36bcb4..84e04d21 100644 --- a/spec/resource_spec.rb +++ b/spec/resource_spec.rb @@ -1,32 +1,39 @@ -# coding: utf-8 +# frozen_string_literal: true + require_relative 'spec_helper' describe JSON::LD::Resource do - subject {JSON::LD::Resource.new({'@id' => '_:foo', "http://schema.org/name" => "foo"})} + subject { JSON::LD::Resource.new({ '@id' => '_:foo', "http://schema.org/name" => "foo" }) } + describe "#initialize" do - specify {expect(subject).not_to be_nil} - specify {expect(subject).to be_a(JSON::LD::Resource)} - specify {expect(subject).not_to be_clean} - specify {expect(subject).to be_anonymous} - specify {expect(subject).to be_dirty} - specify {expect(subject).to be_new} - specify {expect(subject).not_to be_resolved} - specify {expect(subject).not_to be_stub} + specify { expect(subject).not_to be_nil } + specify { expect(subject).to be_a(described_class) } + specify { expect(subject).not_to be_clean } + specify { expect(subject).to be_anonymous } + specify { expect(subject).to be_dirty } + specify { expect(subject).to be_new } + specify { expect(subject).not_to be_resolved } + specify { expect(subject).not_to be_stub } + context "schema:name property" do - specify {expect(subject.property("http://schema.org/name")).to eq "foo"} + specify { expect(subject.property("http://schema.org/name")).to eq "foo" } end describe "compacted with context" do - subject {JSON::LD::Resource.new({'@id' => 
'_:foo', "http://schema.org/name" => "foo"}, compact: true, context: {"@vocab" => "http://schema.org/"})} - specify {expect(subject).not_to be_nil} - specify {expect(subject).to be_a(JSON::LD::Resource)} - specify {expect(subject).not_to be_clean} - specify {expect(subject).to be_anonymous} - specify {expect(subject).to be_dirty} - specify {expect(subject).to be_new} - specify {expect(subject).not_to be_resolved} - specify {expect(subject).not_to be_stub} - its(:name) {should eq "foo"} + subject do + described_class.new({ '@id' => '_:foo', "http://schema.org/name" => "foo" }, compact: true, + context: { "@vocab" => "http://schema.org/" }) + end + + specify { expect(subject).not_to be_nil } + specify { expect(subject).to be_a(described_class) } + specify { expect(subject).not_to be_clean } + specify { expect(subject).to be_anonymous } + specify { expect(subject).to be_dirty } + specify { expect(subject).to be_new } + specify { expect(subject).not_to be_resolved } + specify { expect(subject).not_to be_stub } + its(:name) { is_expected.to eq "foo" } end end @@ -39,8 +46,8 @@ end describe "#hash" do - specify {expect(subject.hash).to be_a(Integer)} - + specify { expect(subject.hash).to be_a(Integer) } + it "returns the hash of the attributes" do expect(subject.hash).to eq subject.deresolve.hash end @@ -51,17 +58,18 @@ expect(subject.to_json).to be_a(String) expect(JSON.parse(subject.to_json)).to be_a(Hash) end + it "has same ID" do expect(JSON.parse(subject.to_json)['@id']).to eq subject.id end end describe "#each" do - specify {expect {|b| subject.each(&b)}.to yield_with_args(subject.statements.first)} + specify { expect { |b| subject.each(&b) }.to yield_with_args(subject.statements.first) } end describe RDF::Enumerable do - specify {expect(subject).to be_enumerable} + specify { expect(subject).to be_enumerable } it "initializes a graph" do g = RDF::Graph.new << subject @@ -71,6 +79,6 @@ end describe "#save" do - specify {expect {subject.save}.to 
raise_error(NotImplementedError)} + specify { expect { subject.save }.to raise_error(NotImplementedError) } end end diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index 541b2ff5..b81e71ba 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -1,5 +1,7 @@ -$:.unshift(File.join("../../lib", __FILE__)) -$:.unshift File.dirname(__FILE__) +# frozen_string_literal: true + +$LOAD_PATH.unshift(File.join("../../lib", __FILE__)) +$LOAD_PATH.unshift File.dirname(__FILE__) require "bundler/setup" require 'rspec' @@ -17,15 +19,15 @@ require 'simplecov' require 'simplecov-lcov' SimpleCov::Formatter::LcovFormatter.config do |config| - #Coveralls is coverage by default/lcov. Send info results + # Coveralls is coverage by default/lcov. Send info results config.report_with_single_file = true config.single_report_path = 'coverage/lcov.info' end SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter.new([ - SimpleCov::Formatter::HTMLFormatter, - SimpleCov::Formatter::LcovFormatter - ]) + SimpleCov::Formatter::HTMLFormatter, + SimpleCov::Formatter::LcovFormatter + ]) SimpleCov.start do add_filter "/spec/" end @@ -50,7 +52,7 @@ Dir.mkdir(URI_CACHE) unless File.directory?(URI_CACHE) # Cache client requests -::RSpec.configure do |c| +RSpec.configure do |c| c.filter_run focus: true c.run_all_when_everything_filtered = true c.include(RDF::Spec::Matchers) @@ -67,9 +69,9 @@ def detect_format(stream) string = stream.to_s end case string - when / v.to_s)} + if (bijection = ds_actual.bijection_to(ds_expected)) + bijection = bijection.inject({}) { |memo, (k, v)| memo.merge(k.to_s => v.to_s) } # Recursively replace blank nodes in actual with the bijection replace_nodes(actual, bijection) @@ -92,7 +94,7 @@ def remap_bnodes(actual, expected) def replace_nodes(object, bijection) case object when Array - object.map {|o| replace_nodes(o, bijection)} + object.map { |o| replace_nodes(o, bijection) } when Hash object.inject({}) do |memo, (k, v)| memo.merge(bijection.fetch(k, k) => 
replace_nodes(v, bijection)) @@ -104,7 +106,6 @@ def replace_nodes(object, bijection) end end - LIBRARY_INPUT = JSON.parse(%([ { "@id": "http://example.org/library", diff --git a/spec/streaming_reader_spec.rb b/spec/streaming_reader_spec.rb index 59e92b9c..c3689a0b 100644 --- a/spec/streaming_reader_spec.rb +++ b/spec/streaming_reader_spec.rb @@ -1,39 +1,40 @@ -# coding: utf-8 +# frozen_string_literal: true + require_relative 'spec_helper' require 'rdf/spec/reader' -describe JSON::LD::Reader do - let!(:doap) {File.expand_path("../../etc/doap.jsonld", __FILE__)} - let!(:doap_nt) {File.expand_path("../../etc/doap.nt", __FILE__)} - let!(:doap_count) {File.open(doap_nt).each_line.to_a.length} - let(:logger) {RDF::Spec.logger} +describe JSON::LD::StreamingReader do + let!(:doap) { File.expand_path('../etc/doap.jsonld', __dir__) } + let!(:doap_nt) { File.expand_path('../etc/doap.nt', __dir__) } + let!(:doap_count) { File.open(doap_nt).each_line.to_a.length } + let(:logger) { RDF::Spec.logger } - after(:each) {|example| puts logger.to_s if example.exception} + after { |example| puts logger if example.exception } it_behaves_like 'an RDF::Reader' do - let(:reader_input) {File.read(doap)} - let(:reader) {JSON::LD::Reader.new(reader_input, stream: true)} - let(:reader_count) {doap_count} + let(:reader_input) { File.read(doap) } + let(:reader) { JSON::LD::Reader.new(reader_input, stream: true) } + let(:reader_count) { doap_count } end context "when validating", pending: ("JRuby support for jsonlint" if RUBY_ENGINE == "jruby") do it "detects invalid JSON" do expect do |b| - described_class.new(StringIO.new(%({"a": "b", "a": "c"})), validate: true, logger: false).each_statement(&b) + JSON::LD::Reader.new(StringIO.new(%({"a": "b", "a": "c"})), validate: true, logger: false).each_statement(&b) end.to raise_error(RDF::ReaderError) end end context :interface do { - plain: %q({ + plain: '{ "@context": {"foaf": "http://xmlns.com/foaf/0.1/"}, "@type": "foaf:Person", "@id": "_:bnode1", 
"foaf:homepage": "http://example.com/bob/", "foaf:name": "Bob" - }), - leading_comment: %q( + }', + leading_comment: ' // A comment before content { "@context": {"foaf": "http://xmlns.com/foaf/0.1/"}, @@ -41,8 +42,8 @@ "@id": "_:bnode1", "foaf:homepage": "http://example.com/bob/", "foaf:name": "Bob" - }), - script: %q(), - script_comments: %q(', + script_comments: '), + ' }.each do |variant, src| context variant do - subject {src} + subject { src } describe "#initialize" do it "yields reader given string" do @@ -112,7 +113,7 @@ context "Selected toRdf tests" do { - "e004": { + e004: { input: %({ "@context": { "mylist1": {"@id": "http://example.com/mylist1", "@container": "@list"} @@ -128,7 +129,7 @@ . ) }, - "e015": { + e015: { input: %({ "@context": { "myset2": {"@id": "http://example.com/myset2", "@container": "@set" } @@ -139,7 +140,7 @@ expect: %( ) }, - "in06": { + in06: { input: %({ "@context": { "@version": 1.1, @@ -175,60 +176,71 @@ end end - describe "test suite" do - require_relative 'suite_helper' - m = Fixtures::SuiteTest::Manifest.open("#{Fixtures::SuiteTest::STREAM_SUITE}stream-toRdf-manifest.jsonld") - describe m.name do - m.entries.each do |t| - specify "#{t.property('@id')}: #{t.name}#{' (negative test)' unless t.positiveTest?}" do - pending "Generalized RDF" if t.options[:produceGeneralizedRdf] - pending "@nest defining @id" if %w(#tin06).include?(t.property('@id')) - pending "double @reverse" if %w(#te043).include?(t.property('@id')) - pending "graph map containing named graph" if %w(#te084 #te087 #te098 #te101 #te105 #te106).include?(t.property('@id')) - pending "named graphs" if %w(#t0029 #te021).include?(t.property('@id')) - - if %w(#t0118).include?(t.property('@id')) - expect {t.run self}.to write(/Statement .* is invalid/).to(:error) - elsif %w(#twf07).include?(t.property('@id')) - expect {t.run self}.to write(/skipping graph statement within invalid graph name/).to(:error) - elsif %w(#te075).include?(t.property('@id')) - expect {t.run 
self}.to write(/is invalid/).to(:error) - elsif %w(#te005 #tpr34 #tpr35 #tpr36 #tpr37 #tpr38 #tpr39 #te119 #te120).include?(t.property('@id')) - expect {t.run self}.to write("beginning with '@' are reserved for future use").to(:error) - elsif %w(#te068).include?(t.property('@id')) - expect {t.run self}.to write("[DEPRECATION]").to(:error) - elsif %w(#twf05).include?(t.property('@id')) - expect {t.run self}.to write("@language must be valid BCP47").to(:error) - else - expect {t.run self}.not_to write.to(:error) + unless ENV['CI'] + describe "test suite" do + require_relative 'suite_helper' + m = Fixtures::SuiteTest::Manifest.open("#{Fixtures::SuiteTest::STREAM_SUITE}stream-toRdf-manifest.jsonld") + describe m.name do + m.entries.each do |t| + specify "#{t.property('@id')}: #{t.name}#{' (negative test)' unless t.positiveTest?}" do + pending "Generalized RDF" if t.options[:produceGeneralizedRdf] + pending "@nest defining @id" if %w[#tin06].include?(t.property('@id')) + pending "double @reverse" if %w[#te043].include?(t.property('@id')) + pending "graph map containing named graph" if %w[#te084 #te087 #te098 #te101 #te105 + #te106].include?(t.property('@id')) + pending "named graphs" if %w[#t0029 #te021].include?(t.property('@id')) + + if %w[#t0118].include?(t.property('@id')) + expect { t.run self }.to write(/Statement .* is invalid/).to(:error) + elsif %w[#twf07].include?(t.property('@id')) + expect { t.run self }.to write(/skipping graph statement within invalid graph name/).to(:error) + elsif %w[#te075].include?(t.property('@id')) + expect { t.run self }.to write(/is invalid/).to(:error) + elsif %w[#te005 #tpr34 #tpr35 #tpr36 #tpr37 #tpr38 #tpr39 #te119 #te120].include?(t.property('@id')) + expect { t.run self }.to write("beginning with '@' are reserved for future use").to(:error) + elsif %w[#te068].include?(t.property('@id')) + expect { t.run self }.to write("[DEPRECATION]").to(:error) + elsif %w[#twf05].include?(t.property('@id')) + expect { t.run self }.to 
write("@language must be valid BCP47").to(:error) + else + expect { t.run self }.not_to write.to(:error) + end end end end end - end unless ENV['CI'] + end def run_to_rdf(params) input = params[:input] logger.info("input: #{input}") output = RDF::Repository.new if params[:expect] - RDF::NQuads::Reader.new(params[:expect], validate: false) {|r| output << r} + RDF::NQuads::Reader.new(params[:expect], validate: false) { |r| output << r } logger.info("expect (quads): #{output.dump(:nquads, validate: false)}") else logger.info("expect: #{Regexp.new params[:exception]}") end - + graph = params[:graph] || RDF::Repository.new pending params.fetch(:pending, "test implementation") if !input || params[:pending] if params[:exception] expect do |b| JSON::LD::Reader.new(input, stream: true, validate: true, logger: false, **params).each_statement(&b) - end.to raise_error {|er| expect(er.message).to include params[:exception]} + end.to raise_error { |er| expect(er.message).to include params[:exception] } else if params[:write] - expect{JSON::LD::Reader.new(input, stream: true, logger: logger, **params) {|st| graph << st}}.to write(params[:write]).to(:error) + expect do + JSON::LD::Reader.new(input, stream: true, logger: logger, **params) do |st| + graph << st + end + end.to write(params[:write]).to(:error) else - expect{JSON::LD::Reader.new(input, stream: true, logger: logger, **params) {|st| graph << st}}.not_to write.to(:error) + expect do + JSON::LD::Reader.new(input, stream: true, logger: logger, **params) do |st| + graph << st + end + end.not_to write.to(:error) end logger.info("results (quads): #{graph.dump(:nquads, validate: false)}") expect(graph).to be_equivalent_graph(output, logger: logger, inputDocument: input) diff --git a/spec/streaming_writer_spec.rb b/spec/streaming_writer_spec.rb index a4ec0771..ac4ac21f 100644 --- a/spec/streaming_writer_spec.rb +++ b/spec/streaming_writer_spec.rb @@ -1,25 +1,24 @@ -# coding: utf-8 require_relative 'spec_helper' require 
'rdf/spec/writer' require 'json/ld/streaming_writer' describe JSON::LD::StreamingWriter do - let(:logger) {RDF::Spec.logger} + let(:logger) { RDF::Spec.logger } - after(:each) {|example| puts logger.to_s if example.exception} + after { |example| puts logger if example.exception } it_behaves_like 'an RDF::Writer' do - let(:writer) {JSON::LD::Writer.new(StringIO.new(""), stream: true)} + let(:writer) { JSON::LD::Writer.new(StringIO.new(""), stream: true) } end context "simple tests" do - it "should use full URIs without base" do + it "uses full URIs without base" do input = %( .) obj = serialize(input) expect(parse(obj.to_json, format: :jsonld)).to be_equivalent_graph(parse(input), logger: logger) expect(obj).to produce_jsonld([{ - '@id' => "http://a/b", - "http://a/c" => [{"@id" => "http://a/d"}] + '@id' => "http://a/b", + "http://a/c" => [{ "@id" => "http://a/d" }] }], logger) end @@ -32,98 +31,101 @@ ) obj = serialize(input) expect(parse(obj.to_json, format: :jsonld)).to be_equivalent_graph(parse(input), logger: logger) - expect(obj).to eql JSON.parse(%{[{ + expect(obj).to eql JSON.parse(%([{ "@id": "https://senet.org/gm", "@type": ["http://vocab.org/frbr/core#Work"], "http://purl.org/dc/terms/title": [{"@value": "Rhythm Paradise", "@language": "en"}], "https://senet.org/ns#unofficialTitle": [{"@value": "Rhythm Tengoku", "@language": "en"}], "https://senet.org/ns#urlkey": [{"@value": "rhythm-tengoku"}] - }]}) + }])) end it "serializes multiple subjects" do - input = %q( + input = ' @prefix : . @prefix dc: . a :TestCase . a :TestCase . 
- ) + ' obj = serialize(input) expect(parse(obj.to_json, format: :jsonld)).to be_equivalent_graph(parse(input), logger: logger) - expect(obj).to contain_exactly(*JSON.parse(%{[ + expect(obj).to match_array(JSON.parse(%([ {"@id": "http://example.com/test-cases/0001", "@type": ["http://www.w3.org/2006/03/test-description#TestCase"]}, {"@id": "http://example.com/test-cases/0002", "@type": ["http://www.w3.org/2006/03/test-description#TestCase"]} - ]})) + ]))) end end context "Named Graphs" do { "default" => [ - %q({ .}), - %q([{"@id": "a", "b": [{"@id": "c"}]}]) + '{ .}', + '[{"@id": "a", "b": [{"@id": "c"}]}]' ], "named" => [ - %q( { .}), - %q([{"@id" : "C", "@graph" : [{"@id": "a", "b": [{"@id": "c"}]}]}]) + ' { .}', + '[{"@id" : "C", "@graph" : [{"@id": "a", "b": [{"@id": "c"}]}]}]' ], "combo" => [ - %q( + ' . { .} - ), - %q([ + ', + '[ {"@id": "a", "b": [{"@id": "c"}]}, {"@id": "C", "@graph": [{"@id": "A", "b": [{"@id": "c"}]}]} - ]) + ]' ], "combo with duplicated statement" => [ - %q( + ' . { .} - ), - %q([ + ', + '[ {"@id": "a", "b": [{"@id": "c"}]}, {"@id": "C", "@graph": [{"@id": "a", "b": [{"@id": "c"}]}]} - ]) - ], + ]' + ] }.each_pair do |title, (input, matches)| context title do - subject {serialize(input)} + subject { serialize(input) } + it "matches expected json" do - expect(subject).to contain_exactly(*JSON.parse(matches)) + expect(subject).to match_array(JSON.parse(matches)) end end end end + unless ENV['CI'] + context "Writes fromRdf tests to isomorphic graph" do + require 'suite_helper' + m = Fixtures::SuiteTest::Manifest.open("#{Fixtures::SuiteTest::SUITE}fromRdf-manifest.jsonld") + [nil, {}].each do |ctx| + context "with context #{ctx.inspect}" do + describe m.name do + m.entries.each do |t| + next unless t.positiveTest? 
&& !t.property('input').include?('0016') - context "Writes fromRdf tests to isomorphic graph" do - require 'suite_helper' - m = Fixtures::SuiteTest::Manifest.open("#{Fixtures::SuiteTest::SUITE}fromRdf-manifest.jsonld") - [nil, {}].each do |ctx| - context "with context #{ctx.inspect}" do - describe m.name do - m.entries.each do |t| - next unless t.positiveTest? && !t.property('input').include?('0016') - t.logger = RDF::Spec.logger - t.logger.info "test: #{t.inspect}" - t.logger.info "source: #{t.input}" - specify "#{t.property('@id')}: #{t.name}" do - repo = RDF::Repository.load(t.input_loc, format: :nquads) - jsonld = JSON::LD::Writer.buffer(stream: true, context: ctx, logger: t.logger, **t.options) do |writer| - writer << repo - end - t.logger.info "Generated: #{jsonld}" + t.logger = RDF::Spec.logger + t.logger.info "test: #{t.inspect}" + t.logger.info "source: #{t.input}" + specify "#{t.property('@id')}: #{t.name}" do + repo = RDF::Repository.load(t.input_loc, format: :nquads) + jsonld = JSON::LD::Writer.buffer(stream: true, context: ctx, logger: t.logger, **t.options) do |writer| + writer << repo + end + t.logger.info "Generated: #{jsonld}" - # And then, re-generate jsonld as RDF - expect(parse(jsonld, format: :jsonld, **t.options)).to be_equivalent_graph(repo, t) + # And then, re-generate jsonld as RDF + expect(parse(jsonld, format: :jsonld, **t.options)).to be_equivalent_graph(repo, t) + end end end end end end - end unless ENV['CI'] + end def parse(input, format: :trig, **options) reader = RDF::Reader.for(format) @@ -139,7 +141,7 @@ def serialize(ntstr, **options) writer << g end puts result if $verbose - + JSON.parse(result) end end diff --git a/spec/suite_compact_spec.rb b/spec/suite_compact_spec.rb index 9982f21c..cc3ff9bc 100644 --- a/spec/suite_compact_spec.rb +++ b/spec/suite_compact_spec.rb @@ -1,22 +1,23 @@ -# coding: utf-8 require_relative 'spec_helper' -describe JSON::LD do - describe "test suite" do - require_relative 'suite_helper' - m = 
Fixtures::SuiteTest::Manifest.open("#{Fixtures::SuiteTest::SUITE}compact-manifest.jsonld") - describe m.name do - m.entries.each do |t| - specify "#{t.property('@id')}: #{t.name} unordered#{' (negative test)' unless t.positiveTest?}" do - t.options[:ordered] = false - expect{t.run self}.not_to write.to(:error) - end +unless ENV['CI'] + describe JSON::LD do + describe "test suite" do + require_relative 'suite_helper' + m = Fixtures::SuiteTest::Manifest.open("#{Fixtures::SuiteTest::SUITE}compact-manifest.jsonld") + describe m.name do + m.entries.each do |t| + specify "#{t.property('@id')}: #{t.name} unordered#{' (negative test)' unless t.positiveTest?}" do + t.options[:ordered] = false + expect { t.run self }.not_to write.to(:error) + end - specify "#{t.property('@id')}: #{t.name} ordered#{' (negative test)' unless t.positiveTest?}" do - t.options[:ordered] = true - expect {t.run self}.not_to write.to(:error) + specify "#{t.property('@id')}: #{t.name} ordered#{' (negative test)' unless t.positiveTest?}" do + t.options[:ordered] = true + expect { t.run self }.not_to write.to(:error) + end end end end end -end unless ENV['CI'] \ No newline at end of file +end diff --git a/spec/suite_expand_spec.rb b/spec/suite_expand_spec.rb index 8d26624e..61c832fd 100644 --- a/spec/suite_expand_spec.rb +++ b/spec/suite_expand_spec.rb @@ -1,36 +1,37 @@ -# coding: utf-8 require_relative 'spec_helper' -describe JSON::LD do - describe "test suite" do - require_relative 'suite_helper' - m = Fixtures::SuiteTest::Manifest.open("#{Fixtures::SuiteTest::SUITE}expand-manifest.jsonld") - describe m.name do - m.entries.each do |t| - # MultiJson use OJ, by default, which doesn't handle native numbers the same as the JSON gem. 
- t.options[:adapter] = :json_gem if %w(#tjs12).include?(t.property('@id')) - specify "#{t.property('@id')}: #{t.name} unordered#{' (negative test)' unless t.positiveTest?}" do - t.options[:ordered] = false - if %w(#t0068).include?(t.property('@id')) - expect{t.run self}.to write("[DEPRECATION]").to(:error) - elsif %w(#t0005 #tpr34 #tpr35 #tpr36 #tpr37 #tpr38 #tpr39 #t0119 #t0120).include?(t.property('@id')) - expect{t.run self}.to write("beginning with '@' are reserved for future use").to(:error) - else - expect {t.run self}.not_to write.to(:error) +unless ENV['CI'] + describe JSON::LD do + describe "test suite" do + require_relative 'suite_helper' + m = Fixtures::SuiteTest::Manifest.open("#{Fixtures::SuiteTest::SUITE}expand-manifest.jsonld") + describe m.name do + m.entries.each do |t| + # MultiJson use OJ, by default, which doesn't handle native numbers the same as the JSON gem. + t.options[:adapter] = :json_gem if %w[#tjs12].include?(t.property('@id')) + specify "#{t.property('@id')}: #{t.name} unordered#{' (negative test)' unless t.positiveTest?}" do + t.options[:ordered] = false + if %w[#t0068].include?(t.property('@id')) + expect { t.run self }.to write("[DEPRECATION]").to(:error) + elsif %w[#t0005 #tpr34 #tpr35 #tpr36 #tpr37 #tpr38 #tpr39 #t0119 #t0120].include?(t.property('@id')) + expect { t.run self }.to write("beginning with '@' are reserved for future use").to(:error) + else + expect { t.run self }.not_to write.to(:error) + end end - end - specify "#{t.property('@id')}: #{t.name} ordered#{' (negative test)' unless t.positiveTest?}" do - t.options[:ordered] = true - if %w(#t0068).include?(t.property('@id')) - expect{t.run self}.to write("[DEPRECATION]").to(:error) - elsif %w(#t0005 #tpr34 #tpr35 #tpr36 #tpr37 #tpr38 #tpr39 #t0119 #t0120).include?(t.property('@id')) - expect{t.run self}.to write("beginning with '@' are reserved for future use").to(:error) - else - expect {t.run self}.not_to write.to(:error) + specify "#{t.property('@id')}: #{t.name} 
ordered#{' (negative test)' unless t.positiveTest?}" do + t.options[:ordered] = true + if %w[#t0068].include?(t.property('@id')) + expect { t.run self }.to write("[DEPRECATION]").to(:error) + elsif %w[#t0005 #tpr34 #tpr35 #tpr36 #tpr37 #tpr38 #tpr39 #t0119 #t0120].include?(t.property('@id')) + expect { t.run self }.to write("beginning with '@' are reserved for future use").to(:error) + else + expect { t.run self }.not_to write.to(:error) + end end end end end end -end unless ENV['CI'] \ No newline at end of file +end diff --git a/spec/suite_flatten_spec.rb b/spec/suite_flatten_spec.rb index f2ed27f6..5c34d401 100644 --- a/spec/suite_flatten_spec.rb +++ b/spec/suite_flatten_spec.rb @@ -1,34 +1,36 @@ -# coding: utf-8 require_relative 'spec_helper' -describe JSON::LD do - describe "test suite" do - require_relative 'suite_helper' - m = Fixtures::SuiteTest::Manifest.open("#{Fixtures::SuiteTest::SUITE}flatten-manifest.jsonld") - describe m.name do - m.entries.each do |t| - t.options[:remap_bnodes] = %w(#t0045).include?(t.property('@id')) +unless ENV['CI'] + describe JSON::LD do + describe "test suite" do + require_relative 'suite_helper' + m = Fixtures::SuiteTest::Manifest.open("#{Fixtures::SuiteTest::SUITE}flatten-manifest.jsonld") + describe m.name do + m.entries.each do |t| + t.options[:remap_bnodes] = %w[#t0045].include?(t.property('@id')) - specify "#{t.property('@id')}: #{t.name} unordered#{' (negative test)' unless t.positiveTest?}" do - t.options[:ordered] = false - if %w(#t0005).include?(t.property('@id')) - expect{t.run self}.to write("Terms beginning with '@' are reserved for future use").to(:error) - else - expect {t.run self}.not_to write.to(:error) + specify "#{t.property('@id')}: #{t.name} unordered#{' (negative test)' unless t.positiveTest?}" do + t.options[:ordered] = false + if %w[#t0005].include?(t.property('@id')) + expect { t.run self }.to write("Terms beginning with '@' are reserved for future use").to(:error) + else + expect { t.run self }.not_to 
write.to(:error) + end end - end - # Skip ordered tests when remapping bnodes - next if t.options[:remap_bnodes] - specify "#{t.property('@id')}: #{t.name} ordered#{' (negative test)' unless t.positiveTest?}" do - t.options[:ordered] = true - if %w(#t0005).include?(t.property('@id')) - expect{t.run self}.to write("Terms beginning with '@' are reserved for future use").to(:error) - else - expect {t.run self}.not_to write.to(:error) + # Skip ordered tests when remapping bnodes + next if t.options[:remap_bnodes] + + specify "#{t.property('@id')}: #{t.name} ordered#{' (negative test)' unless t.positiveTest?}" do + t.options[:ordered] = true + if %w[#t0005].include?(t.property('@id')) + expect { t.run self }.to write("Terms beginning with '@' are reserved for future use").to(:error) + else + expect { t.run self }.not_to write.to(:error) + end end end end end end -end unless ENV['CI'] \ No newline at end of file +end diff --git a/spec/suite_frame_spec.rb b/spec/suite_frame_spec.rb index 79c314ce..007851f9 100644 --- a/spec/suite_frame_spec.rb +++ b/spec/suite_frame_spec.rb @@ -1,29 +1,29 @@ -# coding: utf-8 require_relative 'spec_helper' -describe JSON::LD do - describe "test suite" do - require_relative 'suite_helper' - m = Fixtures::SuiteTest::Manifest.open("#{Fixtures::SuiteTest::FRAME_SUITE}frame-manifest.jsonld") - describe m.name do - m.entries.each do |t| - t.options[:remap_bnodes] = %w(#t0021 #tp021).include?(t.property('@id')) +unless ENV['CI'] + describe JSON::LD do + describe "test suite" do + require_relative 'suite_helper' + m = Fixtures::SuiteTest::Manifest.open("#{Fixtures::SuiteTest::FRAME_SUITE}frame-manifest.jsonld") + describe m.name do + m.entries.each do |t| + t.options[:remap_bnodes] = %w[#t0021 #tp021].include?(t.property('@id')) - specify "#{t.property('@id')}: #{t.name} unordered#{' (negative test)' unless t.positiveTest?}" do - t.options[:ordered] = false - expect {t.run self}.not_to write.to(:error) - end + specify "#{t.property('@id')}: 
#{t.name} unordered#{' (negative test)' unless t.positiveTest?}" do + t.options[:ordered] = false + expect { t.run self }.not_to write.to(:error) + end + + # Skip ordered tests when remapping bnodes + next if t.options[:remap_bnodes] - # Skip ordered tests when remapping bnodes - next if t.options[:remap_bnodes] - specify "#{t.property('@id')}: #{t.name} ordered#{' (negative test)' unless t.positiveTest?}" do - t.options[:ordered] = true - if %w(#tp021).include?(t.property('@id')) - pending("changes due to blank node reordering") + specify "#{t.property('@id')}: #{t.name} ordered#{' (negative test)' unless t.positiveTest?}" do + t.options[:ordered] = true + pending("changes due to blank node reordering") if %w[#tp021].include?(t.property('@id')) + expect { t.run self }.not_to write.to(:error) end - expect {t.run self}.not_to write.to(:error) end end end end -end unless ENV['CI'] \ No newline at end of file +end diff --git a/spec/suite_from_rdf_spec.rb b/spec/suite_from_rdf_spec.rb index 90324d49..1dec141d 100644 --- a/spec/suite_from_rdf_spec.rb +++ b/spec/suite_from_rdf_spec.rb @@ -1,22 +1,23 @@ -# coding: utf-8 require_relative 'spec_helper' -describe JSON::LD do - describe "test suite" do - require_relative 'suite_helper' - m = Fixtures::SuiteTest::Manifest.open("#{Fixtures::SuiteTest::SUITE}fromRdf-manifest.jsonld") - describe m.name do - m.entries.each do |t| - specify "#{t.property('@id')}: #{t.name} unordered#{' (negative test)' unless t.positiveTest?}" do - t.options[:ordered] = false - expect {t.run self}.not_to write.to(:error) - end +unless ENV['CI'] + describe JSON::LD do + describe "test suite" do + require_relative 'suite_helper' + m = Fixtures::SuiteTest::Manifest.open("#{Fixtures::SuiteTest::SUITE}fromRdf-manifest.jsonld") + describe m.name do + m.entries.each do |t| + specify "#{t.property('@id')}: #{t.name} unordered#{' (negative test)' unless t.positiveTest?}" do + t.options[:ordered] = false + expect { t.run self }.not_to write.to(:error) + end 
- specify "#{t.property('@id')}: #{t.name} ordered#{' (negative test)' unless t.positiveTest?}" do - t.options[:ordered] = true - expect {t.run self}.not_to write.to(:error) + specify "#{t.property('@id')}: #{t.name} ordered#{' (negative test)' unless t.positiveTest?}" do + t.options[:ordered] = true + expect { t.run self }.not_to write.to(:error) + end end end end end -end unless ENV['CI'] \ No newline at end of file +end diff --git a/spec/suite_helper.rb b/spec/suite_helper.rb index 51b83141..35fb8717 100644 --- a/spec/suite_helper.rb +++ b/spec/suite_helper.rb @@ -4,15 +4,16 @@ module RDF::Util module File LOCAL_PATHS = { - "https://w3c.github.io/json-ld-api/tests/" => ::File.expand_path("../json-ld-api/tests", __FILE__) + '/', - "https://w3c.github.io/json-ld-framing/tests/" => ::File.expand_path("../json-ld-framing/tests", __FILE__) + '/', - "https://w3c.github.io/json-ld-streaming/tests/" => ::File.expand_path("../json-ld-streaming/tests", __FILE__) + '/', - "https://json-ld.github.io/json-ld-star/tests/" => ::File.expand_path("../json-ld-star/tests", __FILE__) + '/', + "https://w3c.github.io/json-ld-api/tests/" => ::File.expand_path('json-ld-api/tests', __dir__) + '/', + "https://w3c.github.io/json-ld-framing/tests/" => ::File.expand_path('json-ld-framing/tests', __dir__) + '/', + "https://w3c.github.io/json-ld-streaming/tests/" => ::File.expand_path('json-ld-streaming/tests', + __dir__) + '/', + "https://json-ld.github.io/json-ld-star/tests/" => ::File.expand_path('json-ld-star/tests', __dir__) + '/', "file:" => "" } class << self - alias_method :original_open_file, :open_file + alias original_open_file open_file end ## @@ -27,7 +28,8 @@ class << self def self.open_file(filename_or_url, **options, &block) LOCAL_PATHS.each do |r, l| next unless Dir.exist?(l) && filename_or_url.start_with?(r) - #puts "attempt to open #{filename_or_url} locally" + + # puts "attempt to open #{filename_or_url} locally" url_no_frag_or_query = RDF::URI(filename_or_url).dup 
url_no_frag_or_query.query = nil url_no_frag_or_query.fragment = nil @@ -39,12 +41,12 @@ def self.open_file(filename_or_url, **options, &block) end document_options = { - base_uri: RDF::URI(filename_or_url), - charset: Encoding::UTF_8, - code: 200, - headers: options.fetch(:headers, {}) + base_uri: RDF::URI(filename_or_url), + charset: Encoding::UTF_8, + code: 200, + headers: options.fetch(:headers, {}) } - #puts "use #{filename_or_url} locally" + # puts "use #{filename_or_url} locally" document_options[:headers][:content_type] = case localpath when /\.ttl$/ then 'text/turtle' when /\.nq$/ then 'application/n-quads' @@ -61,11 +63,9 @@ def self.open_file(filename_or_url, **options, &block) remote_document = RDF::Util::File::RemoteDocument.new(response.read, **document_options) response.close - if block_given? - return yield remote_document - else - return remote_document - end + return yield remote_document if block + + return remote_document end original_open_file(filename_or_url, **options, &block) @@ -87,9 +87,9 @@ def self.open(file) RDF::Util::File.open_file(file) do |remote| json = JSON.parse(remote.read) if block_given? 
- yield self.from_jsonld(json, manifest_url: RDF::URI(file)) + yield from_jsonld(json, manifest_url: RDF::URI(file)) else - self.from_jsonld(json, manifest_url: RDF::URI(file)) + from_jsonld(json, manifest_url: RDF::URI(file)) end end end @@ -101,7 +101,7 @@ def initialize(json, manifest_url:) # @param [Hash] json framed JSON-LD # @return [Array] - def self.from_jsonld(json, manifest_url: ) + def self.from_jsonld(json, manifest_url:) Manifest.new(json, manifest_url: manifest_url) end @@ -114,8 +114,7 @@ def entries end class Entry < JSON::LD::Resource - attr_accessor :logger - attr_accessor :manifest_url + attr_accessor :logger, :manifest_url def initialize(json, manifest_url:) @manifest_url = manifest_url @@ -132,9 +131,9 @@ def options opts = { documentLoader: Fixtures::SuiteTest.method(:documentLoader), validate: true, - lowercaseLanguage: true, + lowercaseLanguage: true } - {'specVersion' => "json-ld-1.1"}.merge(property('option') || {}).each do |k, v| + { 'specVersion' => "json-ld-1.1" }.merge(property('option') || {}).each do |k, v| opts[k.to_sym] = v end if opts[:expandContext] && !RDF::URI(opts[:expandContext]).absolute? 
@@ -146,13 +145,14 @@ def options end # Alias input, context, expect and frame - %w(input context expect frame).each do |m| + %w[input context expect frame].each do |m| define_method(m.to_sym) do return nil unless property(m) + res = nil - file = self.send("#{m}_loc".to_sym) + file = send("#{m}_loc".to_sym) - dl_opts = {safe: true} + dl_opts = { safe: true } dl_opts[:contentType] = options[:contentType] if m == 'input' && options[:contentType] RDF::Util::File.open_file(file, **dl_opts) do |remote_doc| res = remote_doc.read @@ -164,20 +164,18 @@ def options file = property(m) # Handle redirection internally - if m == "input" && options[:redirectTo] - file = options[:redirectTo] - end + file = options[:redirectTo] if m == "input" && options[:redirectTo] property(m) && manifest_url.join(file).to_s end define_method("#{m}_json".to_sym) do - JSON.parse(self.send(m)) if property(m) + JSON.parse(send(m)) if property(m) end end def testType - property('@type').reject {|t| t =~ /EvaluationTest|SyntaxTest/}.first + property('@type').reject { |t| t =~ /EvaluationTest|SyntaxTest/ }.first end def evaluationTest? @@ -191,21 +189,24 @@ def positiveTest? def syntaxTest? property('@type').to_s.include?('Syntax') end - # Execute the test def run(rspec_example = nil) logger = @logger = RDF::Spec.logger logger.info "test: #{inspect}" logger.info "purpose: #{purpose}" - logger.info "source: #{input rescue nil}" + logger.info "source: #{begin + input + rescue StandardError + nil + end}" logger.info "context: #{context}" if context_loc logger.info "options: #{options.inspect}" unless options.empty? logger.info "frame: #{frame}" if frame_loc options = self.options if options[:specVersion] == "json-ld-1.0" - skip "1.0 test" + skip "1.0 test" return end @@ -213,7 +214,13 @@ def run(rspec_example = nil) options[:lowercaseLanguage] = true if options[:ordered] if positiveTest? 
- logger.info "expected: #{expect rescue nil}" if expect_loc + if expect_loc + logger.info "expected: #{begin + expect + rescue StandardError + nil + end}" + end begin result = case testType when "jld:ExpandTest" @@ -229,7 +236,7 @@ def run(rspec_example = nil) repo = RDF::NQuads::Reader.open(input_loc, rdfstar: options[:rdfstar]) do |reader| reader.each_statement.to_a end.to_a.uniq.extend(RDF::Enumerable) - logger.info "repo: #{repo.dump(self.id == '#t0012' ? :nquads : :trig)}" + logger.info "repo: #{repo.dump(id == '#t0012' ? :nquads : :trig)}" JSON::LD::API.fromRdf(repo, logger: logger, **options) when "jld:ToRDFTest" repo = RDF::Repository.new @@ -249,20 +256,22 @@ def run(rspec_example = nil) rspec_example.instance_eval do # use the parsed input file as @result for Rack Test application @results = res - get "/", {}, "HTTP_ACCEPT" => options.fetch(:httpAccept, ""), "HTTP_LINK" => options.fetch(:httpLink, nil) + get "/", {}, "HTTP_ACCEPT" => options.fetch(:httpAccept, ""), + "HTTP_LINK" => options.fetch(:httpLink, nil) expect(last_response.status).to eq 200 expect(last_response.content_type).to eq options.fetch(:contentType, "") JSON.parse(last_response.body) end else - fail("Unknown test type: #{testType}") + raise("Unknown test type: #{testType}") end if evaluationTest? 
if testType == "jld:ToRDFTest" - expected = RDF::Repository.new << RDF::NQuads::Reader.new(expect, rdfstar: options[:rdfstar], logger: []) - rspec_example.instance_eval { + expected = RDF::Repository.new << RDF::NQuads::Reader.new(expect, rdfstar: options[:rdfstar], + logger: []) + rspec_example.instance_eval do expect(result).to be_equivalent_graph(expected, logger) - } + end else expected = JSON.load(expect) @@ -271,76 +280,75 @@ def run(rspec_example = nil) if options[:ordered] # Compare without transformation - rspec_example.instance_eval { + rspec_example.instance_eval do expect(result).to produce(expected, logger) - } + end else # Without key ordering, reorder result and expected embedded array values and compare # If results are compacted, expand both, reorder and re-compare - rspec_example.instance_eval { + rspec_example.instance_eval do expect(result).to produce_jsonld(expected, logger) - } + end # If results are compacted, expand both, reorder and re-compare if result.to_s.include?('@context') exp_expected = JSON::LD::API.expand(expected, **options) exp_result = JSON::LD::API.expand(result, **options) - rspec_example.instance_eval { + rspec_example.instance_eval do expect(exp_result).to produce_jsonld(exp_expected, logger) - } + end end end end else - rspec_example.instance_eval { - expect(result).to_not be_nil - } + rspec_example.instance_eval do + expect(result).not_to be_nil + end end rescue JSON::LD::JsonLdError => e - fail("Processing error: #{e.message}") + raise("Processing error: #{e.message}") end else logger.info "expected: #{property('expect')}" if property('expect') t = self rspec_example.instance_eval do - if t.evaluationTest? 
- expect do - case t.testType - when "jld:ExpandTest" - JSON::LD::API.expand(t.input_loc, logger: logger, **options) - when "jld:CompactTest" - JSON::LD::API.compact(t.input_loc, t.context_json['@context'], logger: logger, **options) - when "jld:FlattenTest" - JSON::LD::API.flatten(t.input_loc, t.context_loc, logger: logger, **options) - when "jld:FrameTest" - JSON::LD::API.frame(t.input_loc, t.frame_loc, logger: logger, **options) - when "jld:FromRDFTest" - repo = RDF::Repository.load(t.input_loc, rdfstar: options[:rdfstar]) - logger.info "repo: #{repo.dump(t.id == '#t0012' ? :nquads : :trig)}" - JSON::LD::API.fromRdf(repo, logger: logger, **options) - when "jld:HttpTest" - rspec_example.instance_eval do - # use the parsed input file as @result for Rack Test application - @results = t.input_json - get "/", {}, "HTTP_ACCEPT" => options.fetch(:httpAccept, "") - expect(last_response.status).to eq t.property('expect') - expect(last_response.content_type).to eq options.fetch(:contentType, "") - raise "406" if t.property('expect') == 406 - raise "Expected status #{t.property('expectErrorCode')}, not #{last_response.status}" - end - when "jld:ToRDFTest" - if t.manifest_url.to_s.include?('stream') - JSON::LD::Reader.open(t.input_loc, stream: true, logger: logger, **options).each_statement {} - else - JSON::LD::API.toRdf(t.input_loc, rename_bnodes: false, logger: logger, **options) {} - end + raise("No support for NegativeSyntaxTest") unless t.evaluationTest? 
+ + expect do + case t.testType + when "jld:ExpandTest" + JSON::LD::API.expand(t.input_loc, logger: logger, **options) + when "jld:CompactTest" + JSON::LD::API.compact(t.input_loc, t.context_json['@context'], logger: logger, **options) + when "jld:FlattenTest" + JSON::LD::API.flatten(t.input_loc, t.context_loc, logger: logger, **options) + when "jld:FrameTest" + JSON::LD::API.frame(t.input_loc, t.frame_loc, logger: logger, **options) + when "jld:FromRDFTest" + repo = RDF::Repository.load(t.input_loc, rdfstar: options[:rdfstar]) + logger.info "repo: #{repo.dump(t.id == '#t0012' ? :nquads : :trig)}" + JSON::LD::API.fromRdf(repo, logger: logger, **options) + when "jld:HttpTest" + rspec_example.instance_eval do + # use the parsed input file as @result for Rack Test application + @results = t.input_json + get "/", {}, "HTTP_ACCEPT" => options.fetch(:httpAccept, "") + expect(last_response.status).to eq t.property('expect') + expect(last_response.content_type).to eq options.fetch(:contentType, "") + raise "406" if t.property('expect') == 406 + + raise "Expected status #{t.property('expectErrorCode')}, not #{last_response.status}" + end + when "jld:ToRDFTest" + if t.manifest_url.to_s.include?('stream') + JSON::LD::Reader.open(t.input_loc, stream: true, logger: logger, **options).each_statement {} else - success("Unknown test type: #{testType}") + JSON::LD::API.toRdf(t.input_loc, rename_bnodes: false, logger: logger, **options) {} end - end.to raise_error(/#{t.property('expectErrorCode')}/) - else - fail("No support for NegativeSyntaxTest") - end + else + success("Unknown test type: #{testType}") + end + end.to raise_error(/#{t.property('expectErrorCode')}/) end end end @@ -365,7 +373,7 @@ def to_quad(thing) v += "@#{thing.language}" if thing.language v when RDF::Statement - thing.to_quad.map {|r| to_quad(r)}.compact.join(" ") + " .\n" + thing.to_quad.map { |r| to_quad(r) }.compact.join(" ") + " .\n" end end @@ -380,8 +388,8 @@ def quoted(string) # @param [String, #to_s] 
string # @return [String] def escaped(string) - string.to_s.gsub('\\', '\\\\').gsub("\t", '\\t'). - gsub("\n", '\\n').gsub("\r", '\\r').gsub('"', '\\"') + string.to_s.gsub('\\', '\\\\').gsub("\t", '\\t') + .gsub("\n", '\\n').gsub("\r", '\\r').gsub('"', '\\"') end end @@ -399,11 +407,12 @@ def escaped(string) def documentLoader(url, **options, &block) options[:headers] ||= JSON::LD::API::OPEN_OPTS[:headers].dup options[:headers][:link] = Array(options[:httpLink]).join(',') if options[:httpLink] - + url = url.to_s[5..-1] if url.to_s.start_with?("file:") JSON::LD::API.documentLoader(url, **options, &block) rescue JSON::LD::JsonLdError::LoadingDocumentFailed, JSON::LD::JsonLdError::MultipleContextLinkHeaders raise unless options[:safe] + "don't raise error" end module_function :documentLoader diff --git a/spec/suite_html_spec.rb b/spec/suite_html_spec.rb index 0a20bd38..27246613 100644 --- a/spec/suite_html_spec.rb +++ b/spec/suite_html_spec.rb @@ -1,22 +1,23 @@ -# coding: utf-8 require_relative 'spec_helper' -describe JSON::LD do - describe "test suite" do - require_relative 'suite_helper' - m = Fixtures::SuiteTest::Manifest.open("#{Fixtures::SuiteTest::SUITE}html-manifest.jsonld") - describe m.name do - m.entries.each do |t| - specify "#{t.property('@id')}: #{t.name} unordered#{' (negative test)' unless t.positiveTest?}" do - t.options[:ordered] = false - expect {t.run self}.not_to write.to(:error) - end +unless ENV['CI'] + describe JSON::LD do + describe "test suite" do + require_relative 'suite_helper' + m = Fixtures::SuiteTest::Manifest.open("#{Fixtures::SuiteTest::SUITE}html-manifest.jsonld") + describe m.name do + m.entries.each do |t| + specify "#{t.property('@id')}: #{t.name} unordered#{' (negative test)' unless t.positiveTest?}" do + t.options[:ordered] = false + expect { t.run self }.not_to write.to(:error) + end - specify "#{t.property('@id')}: #{t.name} ordered#{' (negative test)' unless t.positiveTest?}" do - t.options[:ordered] = true - expect {t.run 
self}.not_to write.to(:error) + specify "#{t.property('@id')}: #{t.name} ordered#{' (negative test)' unless t.positiveTest?}" do + t.options[:ordered] = true + expect { t.run self }.not_to write.to(:error) + end end end end end -end unless ENV['CI'] \ No newline at end of file +end diff --git a/spec/suite_http_spec.rb b/spec/suite_http_spec.rb index f573870c..14a6a04d 100644 --- a/spec/suite_http_spec.rb +++ b/spec/suite_http_spec.rb @@ -1,35 +1,37 @@ -# coding: utf-8 require_relative 'spec_helper' require 'rack/linkeddata' require 'rack/test' begin - describe JSON::LD do - describe "test suite" do - require_relative 'suite_helper' - m = Fixtures::SuiteTest::Manifest.open("#{Fixtures::SuiteTest::SUITE}http-manifest.jsonld") - describe m.name do - include ::Rack::Test::Methods - before(:all) {JSON::LD::Writer.default_context = "#{Fixtures::SuiteTest::SUITE}http/default-context.jsonld"} - after(:all) {JSON::LD::Writer.default_context = nil} - let(:app) do - JSON::LD::ContentNegotiation.new( - Rack::LinkedData::ContentNegotiation.new( - double("Target Rack Application", :call => [200, {}, @results]), - {} + unless ENV['CI'] + describe JSON::LD do + describe "test suite" do + require_relative 'suite_helper' + m = Fixtures::SuiteTest::Manifest.open("#{Fixtures::SuiteTest::SUITE}http-manifest.jsonld") + describe m.name do + include Rack::Test::Methods + before(:all) { JSON::LD::Writer.default_context = "#{Fixtures::SuiteTest::SUITE}http/default-context.jsonld" } + after(:all) { JSON::LD::Writer.default_context = nil } + + let(:app) do + JSON::LD::ContentNegotiation.new( + Rack::LinkedData::ContentNegotiation.new( + double("Target Rack Application", :call => [200, {}, @results]), + {} + ) ) - ) - end + end - m.entries.each do |t| - specify "#{t.property('@id')}: #{t.name} unordered#{' (negative test)' unless t.positiveTest?}" do - t.options[:ordered] = false - expect {t.run self}.not_to write.to(:error) + m.entries.each do |t| + specify "#{t.property('@id')}: #{t.name} 
unordered#{' (negative test)' unless t.positiveTest?}" do + t.options[:ordered] = false + expect { t.run self }.not_to write.to(:error) + end end end end end - end unless ENV['CI'] + end rescue IOError # Skip this until such a test suite is re-added -end \ No newline at end of file +end diff --git a/spec/suite_remote_doc_spec.rb b/spec/suite_remote_doc_spec.rb index 2341da5d..012a843c 100644 --- a/spec/suite_remote_doc_spec.rb +++ b/spec/suite_remote_doc_spec.rb @@ -1,22 +1,23 @@ -# coding: utf-8 require_relative 'spec_helper' -describe JSON::LD do - describe "test suite" do - require_relative 'suite_helper' - m = Fixtures::SuiteTest::Manifest.open("#{Fixtures::SuiteTest::SUITE}remote-doc-manifest.jsonld") - describe m.name do - m.entries.each do |t| - specify "#{t.property('@id')}: #{t.name} unordered#{' (negative test)' unless t.positiveTest?}" do - t.options[:ordered] = false - expect {t.run self}.not_to write.to(:error) - end +unless ENV['CI'] + describe JSON::LD do + describe "test suite" do + require_relative 'suite_helper' + m = Fixtures::SuiteTest::Manifest.open("#{Fixtures::SuiteTest::SUITE}remote-doc-manifest.jsonld") + describe m.name do + m.entries.each do |t| + specify "#{t.property('@id')}: #{t.name} unordered#{' (negative test)' unless t.positiveTest?}" do + t.options[:ordered] = false + expect { t.run self }.not_to write.to(:error) + end - specify "#{t.property('@id')}: #{t.name} ordered#{' (negative test)' unless t.positiveTest?}" do - t.options[:ordered] = true - expect {t.run self}.not_to write.to(:error) + specify "#{t.property('@id')}: #{t.name} ordered#{' (negative test)' unless t.positiveTest?}" do + t.options[:ordered] = true + expect { t.run self }.not_to write.to(:error) + end end end end end -end unless ENV['CI'] \ No newline at end of file +end diff --git a/spec/suite_to_rdf_spec.rb b/spec/suite_to_rdf_spec.rb index 48f664d0..c003cd1c 100644 --- a/spec/suite_to_rdf_spec.rb +++ b/spec/suite_to_rdf_spec.rb @@ -1,30 +1,31 @@ -# coding: 
utf-8 require_relative 'spec_helper' -describe JSON::LD do - describe "test suite" do - require_relative 'suite_helper' - m = Fixtures::SuiteTest::Manifest.open("#{Fixtures::SuiteTest::SUITE}toRdf-manifest.jsonld") - describe m.name do - m.entries.each do |t| - specify "#{t.property('@id')}: #{t.name}#{' (negative test)' unless t.positiveTest?}" do - pending "Generalized RDF" if t.options[:produceGeneralizedRdf] - pending "RDF-star" if t.property('@id') == '#te122' - if %w(#t0118).include?(t.property('@id')) - expect {t.run self}.to write(/Statement .* is invalid/).to(:error) - elsif %w(#te075).include?(t.property('@id')) - expect {t.run self}.to write(/is invalid/).to(:error) - elsif %w(#te005 #tpr34 #tpr35 #tpr36 #tpr37 #tpr38 #tpr39 #te119 #te120).include?(t.property('@id')) - expect {t.run self}.to write("beginning with '@' are reserved for future use").to(:error) - elsif %w(#te068).include?(t.property('@id')) - expect {t.run self}.to write("[DEPRECATION]").to(:error) - elsif %w(#twf05).include?(t.property('@id')) - expect {t.run self}.to write("@language must be valid BCP47").to(:error) - else - expect {t.run self}.not_to write.to(:error) +unless ENV['CI'] + describe JSON::LD do + describe "test suite" do + require_relative 'suite_helper' + m = Fixtures::SuiteTest::Manifest.open("#{Fixtures::SuiteTest::SUITE}toRdf-manifest.jsonld") + describe m.name do + m.entries.each do |t| + specify "#{t.property('@id')}: #{t.name}#{' (negative test)' unless t.positiveTest?}" do + pending "Generalized RDF" if t.options[:produceGeneralizedRdf] + pending "RDF-star" if t.property('@id') == '#te122' + if %w[#t0118].include?(t.property('@id')) + expect { t.run self }.to write(/Statement .* is invalid/).to(:error) + elsif %w[#te075].include?(t.property('@id')) + expect { t.run self }.to write(/is invalid/).to(:error) + elsif %w[#te005 #tpr34 #tpr35 #tpr36 #tpr37 #tpr38 #tpr39 #te119 #te120].include?(t.property('@id')) + expect { t.run self }.to write("beginning with '@' are 
reserved for future use").to(:error) + elsif %w[#te068].include?(t.property('@id')) + expect { t.run self }.to write("[DEPRECATION]").to(:error) + elsif %w[#twf05].include?(t.property('@id')) + expect { t.run self }.to write("@language must be valid BCP47").to(:error) + else + expect { t.run self }.not_to write.to(:error) + end end end end end end -end unless ENV['CI'] \ No newline at end of file +end diff --git a/spec/support/extensions.rb b/spec/support/extensions.rb index dc352a3d..322ad54e 100644 --- a/spec/support/extensions.rb +++ b/spec/support/extensions.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class Object def equivalent_jsonld?(other, ordered: false) self == other @@ -7,6 +9,7 @@ def equivalent_jsonld?(other, ordered: false) class Hash def equivalent_jsonld?(other, ordered: false) return false unless other.is_a?(Hash) && other.length == length + all? do |key, value| # List values are still ordered if key == '@language' && value.is_a?(String) @@ -18,11 +21,8 @@ def equivalent_jsonld?(other, ordered: false) end def diff(other) - self.keys.inject({}) do |memo, key| - unless self[key] == other[key] - memo[key] = [self[key], other[key]] - end - memo + keys.each_with_object({}) do |key, memo| + memo[key] = [self[key], other[key]] unless self[key] == other[key] end end end @@ -30,14 +30,15 @@ def diff(other) class Array def equivalent_jsonld?(other, ordered: false) return false unless other.is_a?(Array) && other.length == length + if ordered b = other.dup # All elements must match in order - all? {|av| av.equivalent_jsonld?(b.shift)} + all? { |av| av.equivalent_jsonld?(b.shift) } else # Look for any element which matches all? do |av| - other.any? {|bv| av.equivalent_jsonld?(bv)} + other.any? 
{ |bv| av.equivalent_jsonld?(bv) } end end end diff --git a/spec/to_rdf_spec.rb b/spec/to_rdf_spec.rb index e5fe1c0f..c1b48f43 100644 --- a/spec/to_rdf_spec.rb +++ b/spec/to_rdf_spec.rb @@ -1,36 +1,37 @@ -# coding: utf-8 +# frozen_string_literal: true + require_relative 'spec_helper' describe JSON::LD::API do - let(:logger) {RDF::Spec.logger} + let(:logger) { RDF::Spec.logger } - context ".toRdf" do - it "should implement RDF::Enumerable" do - expect(JSON::LD::API.toRdf({})).to be_a(RDF::Enumerable) + describe ".toRdf" do + it "implements RDF::Enumerable" do + expect(described_class.toRdf({})).to be_a(RDF::Enumerable) end context "unnamed nodes" do { "no @id" => [ - %q({ + '{ "http://example.com/foo": "bar" - }), - %q([ "bar"^^xsd:string] .) + }', + '[ "bar"^^xsd:string] .' ], "@id with _:a" => [ - %q({ + '{ "@id": "_:a", "http://example.com/foo": "bar" - }), - %q([ "bar"^^xsd:string] .) + }', + '[ "bar"^^xsd:string] .' ], "@id with _:a and reference" => [ - %q({ + '{ "@id": "_:a", "http://example.com/foo": {"@id": "_:a"} - }), - %q(_:a _:a .) - ], + }', + '_:a _:a .' + ] }.each do |title, (js, ttl)| it title do ttl = "@prefix xsd: . #{ttl}" @@ -42,19 +43,19 @@ context "nodes with @id" do { "with IRI" => [ - %q({ + '{ "@id": "http://example.com/a", "http://example.com/foo": "bar" - }), - %q( "bar" .) - ], + }', + ' "bar" .' + ] }.each do |title, (js, ttl)| it title do ttl = "@prefix xsd: . #{ttl}" expect(parse(js)).to be_equivalent_graph(ttl, logger: logger, inputDocument: js) end end - + context "with relative IRIs" do { "base" => [ @@ -77,11 +78,12 @@ "@type": "#{RDF::RDFS.Resource}" }), %( a <#{RDF::RDFS.Resource}> .) - ], + ] }.each do |title, (js, ttl)| it title do ttl = "@prefix xsd: . 
#{ttl}" - expect(parse(js, base: "http://example.org/")).to be_equivalent_graph(ttl, logger: logger, inputDocument: js) + expect(parse(js, + base: "http://example.org/")).to be_equivalent_graph(ttl, logger: logger, inputDocument: js) end end end @@ -90,22 +92,22 @@ context "typed nodes" do { "one type" => [ - %q({ + '{ "@type": "http://example.com/foo" - }), - %q([ a ] .) + }', + '[ a ] .' ], "two types" => [ - %q({ + '{ "@type": ["http://example.com/foo", "http://example.com/baz"] - }), - %q([ a , ] .) + }', + '[ a , ] .' ], "blank node type" => [ - %q({ + '{ "@type": "_:foo" - }), - %q([ a _:foo ] .) + }', + '[ a _:foo ] .' ] }.each do |title, (js, ttl)| it title do @@ -118,29 +120,29 @@ context "key/value" do { "string" => [ - %q({ + '{ "http://example.com/foo": "bar" - }), - %q([ "bar"^^xsd:string ] .) + }', + '[ "bar"^^xsd:string ] .' ], "strings" => [ - %q({ + '{ "http://example.com/foo": ["bar", "baz"] - }), - %q([ "bar"^^xsd:string, "baz"^^xsd:string ] .) + }', + '[ "bar"^^xsd:string, "baz"^^xsd:string ] .' ], "IRI" => [ - %q({ + '{ "http://example.com/foo": {"@id": "http://example.com/bar"} - }), - %q([ ] .) + }', + '[ ] .' ], "IRIs" => [ - %q({ + '{ "http://example.com/foo": [{"@id": "http://example.com/bar"}, {"@id": "http://example.com/baz"}] - }), - %q([ , ] .) - ], + }', + '[ , ] .' + ] }.each do |title, (js, ttl)| it title do ttl = "@prefix xsd: . #{ttl}" @@ -153,28 +155,28 @@ { "plain literal" => [ - %q({"@id": "http://greggkellogg.net/foaf#me", "http://xmlns.com/foaf/0.1/name": "Gregg Kellogg"}), - %q( "Gregg Kellogg" .) + '{"@id": "http://greggkellogg.net/foaf#me", "http://xmlns.com/foaf/0.1/name": "Gregg Kellogg"}', + ' "Gregg Kellogg" .' ], "explicit plain literal" => [ - %q({"http://xmlns.com/foaf/0.1/name": {"@value": "Gregg Kellogg"}}), - %q(_:a "Gregg Kellogg"^^xsd:string .) + '{"http://xmlns.com/foaf/0.1/name": {"@value": "Gregg Kellogg"}}', + '_:a "Gregg Kellogg"^^xsd:string .' 
], "language tagged literal" => [ - %q({"http://www.w3.org/2000/01/rdf-schema#label": {"@value": "A plain literal with a lang tag.", "@language": "en-us"}}), - %q(_:a "A plain literal with a lang tag."@en-us .) + '{"http://www.w3.org/2000/01/rdf-schema#label": {"@value": "A plain literal with a lang tag.", "@language": "en-us"}}', + '_:a "A plain literal with a lang tag."@en-us .' ], "I18N literal with language" => [ - %q([{ + '[{ "@id": "http://greggkellogg.net/foaf#me", "http://xmlns.com/foaf/0.1/knows": {"@id": "http://www.ivan-herman.net/foaf#me"} },{ "@id": "http://www.ivan-herman.net/foaf#me", "http://xmlns.com/foaf/0.1/name": {"@value": "Herman Iván", "@language": "hu"} - }]), + }]', %q( . "Herman Iv\u00E1n"@hu . @@ -182,14 +184,14 @@ ], "explicit datatyped literal" => [ - %q({ + '{ "@id": "http://greggkellogg.net/foaf#me", "http://purl.org/dc/terms/created": {"@value": "1957-02-27", "@type": "http://www.w3.org/2001/XMLSchema#date"} - }), - %q( + }', + ' "1957-02-27"^^ . - ) - ], + ' + ] }.each do |title, (js, ttl)| it title do ttl = "@prefix xsd: . #{ttl}" @@ -199,7 +201,7 @@ context "with @type: @json" do { - "true": { + true => { input: %({ "@context": { "@version": 1.1, @@ -207,13 +209,13 @@ }, "e": true }), - output:%( + output: %( @prefix ex: . @prefix rdf: . [ex:bool "true"^^rdf:JSON] . ) }, - "false": { + false => { input: %({ "@context": { "@version": 1.1, @@ -227,7 +229,7 @@ [ex:bool "false"^^rdf:JSON] . ) }, - "double": { + double: { input: %({ "@context": { "@version": 1.1, @@ -241,7 +243,7 @@ [ex:double "1.23"^^rdf:JSON] . ) }, - "double-zero": { + 'double-zero': { input: %({ "@context": { "@version": 1.1, @@ -255,7 +257,7 @@ [ex:double "0"^^rdf:JSON] . ) }, - "integer": { + integer: { input: %({ "@context": { "@version": 1.1, @@ -269,7 +271,7 @@ [ex:integer "123"^^rdf:JSON] . ) }, - "string": { + string: { input: %({ "@context": { "@version": 1.1, @@ -283,7 +285,7 @@ [ex:string "\\"string\\""^^rdf:JSON] . 
) }, - "null": { + null: { input: %({ "@context": { "@version": 1.1, @@ -297,7 +299,7 @@ [ex:null "null"^^rdf:JSON] . ) }, - "object": { + object: { input: %({ "@context": { "@version": 1.1, @@ -311,7 +313,7 @@ [ex:object """{"foo":"bar"}"""^^rdf:JSON] . ) }, - "array": { + array: { input: %({ "@context": { "@version": 1.1, @@ -325,7 +327,7 @@ [ex:array """[{"foo":"bar"}]"""^^rdf:JSON] . ) }, - "c14n-arrays": { + 'c14n-arrays': { input: %({ "@context": { "@version": 1.1, @@ -346,7 +348,7 @@ [ex:c14n """[56,{"1":[],"10":null,"d":true}]"""^^rdf:JSON] . ) }, - "c14n-french": { + 'c14n-french': { input: %({ "@context": { "@version": 1.1, @@ -365,7 +367,7 @@ [ex:c14n """{"peach":"This sorting order","péché":"is wrong according to French","pêche":"but canonicalization MUST","sin":"ignore locale"}"""^^rdf:JSON] . ) }, - "c14n-structures": { + 'c14n-structures': { input: %({ "@context": { "@version": 1.1, @@ -386,7 +388,7 @@ [ex:c14n """{"":"empty","1":{" ":56,"f":{"F":5,"f":"hi"}},"10":{},"111":[{"E":"no","e":"yes"}],"A":{},"a":{}}"""^^rdf:JSON] . ) }, - "c14n-unicode": { + 'c14n-unicode': { input: %({ "@context": { "@version": 1.1, @@ -401,7 +403,7 @@ @prefix rdf: . [ex:c14n """{"Unnormalized Unicode":"Å"}"""^^rdf:JSON] . ) - }, + } }.each do |title, params| it title do params[:output] = RDF::Graph.new << RDF::Turtle::Reader.new(params[:output]) @@ -412,7 +414,7 @@ context "with xsd: true" do { - "true": { + true => { input: { "@context" => { "e" => "http://example.org/vocab#e" @@ -425,7 +427,7 @@ [ex:e true] . ) }, - "integer": { + integer: { input: { "@context" => { "e" => "http://example.org/vocab#e" @@ -438,7 +440,7 @@ [ex:e 1] . ) }, - "decimal": { + decimal: { input: { "@context" => { "e" => "http://example.org/vocab#e" @@ -451,7 +453,7 @@ [ex:e 1.1] . ) }, - "float": { + float: { input: { "@context" => { "e" => "http://example.org/vocab#e" @@ -465,7 +467,7 @@ [ex:e "1.1e1"^^xsd:float] . 
) }, - "double": { + double: { input: { "@context" => { "e" => "http://example.org/vocab#e" @@ -479,7 +481,7 @@ [ex:e 1.1e1] . ) }, - "date": { + date: { input: { "@context" => { "e" => "http://example.org/vocab#e" @@ -493,7 +495,7 @@ [ex:e "2022-08-27"^^xsd:date] . ) }, - "time": { + time: { input: { "@context" => { "e" => "http://example.org/vocab#e" @@ -507,7 +509,7 @@ [ex:e "12:00:00"^^xsd:time] . ) }, - "dateTime": { + dateTime: { input: { "@context" => { "e" => "http://example.org/vocab#e" @@ -521,12 +523,12 @@ [ex:e "2022-08-27T12:00:00"^^xsd:dateTime] . ) }, - "language": { + language: { input: { "@context" => { "e" => "http://example.org/vocab#e" }, - "e" => RDF::Literal.new("language", language: :"en-us") + "e" => RDF::Literal.new("language", language: :'en-us') }, output: %( @prefix ex: . @@ -534,7 +536,7 @@ @prefix xsd: . [ex:e "language"@en-us] . ) - }, + } }.each do |title, params| it title do params[:output] = RDF::Graph.new << RDF::Turtle::Reader.new(params[:output]) @@ -547,12 +549,12 @@ context "prefixes" do { "empty suffix" => [ - %q({"@context": {"prefix": "http://example.com/default#"}, "prefix:": "bar"}), - %q(_:a "bar"^^xsd:string .) + '{"@context": {"prefix": "http://example.com/default#"}, "prefix:": "bar"}', + '_:a "bar"^^xsd:string .' ], "prefix:suffix" => [ - %q({"@context": {"prefix": "http://example.com/default#"}, "prefix:foo": "bar"}), - %q(_:a "bar"^^xsd:string .) + '{"@context": {"prefix": "http://example.com/default#"}, "prefix:foo": "bar"}', + '_:a "bar"^^xsd:string .' ] }.each_pair do |title, (js, ttl)| it title do @@ -565,17 +567,17 @@ context "overriding keywords" do { "'url' for @id, 'a' for @type" => [ - %q({ + '{ "@context": {"url": "@id", "a": "@type", "name": "http://schema.org/name"}, "url": "http://example.com/about#gregg", "a": "http://schema.org/Person", "name": "Gregg Kellogg" - }), - %q( + }', + ' . "Gregg Kellogg"^^xsd:string . - ) - ], + ' + ] }.each do |title, (js, ttl)| it title do ttl = "@prefix xsd: . 
#{ttl}" @@ -588,33 +590,33 @@ { "explicit subject" => [ - %q({ + '{ "@context": {"foaf": "http://xmlns.com/foaf/0.1/"}, "@id": "http://greggkellogg.net/foaf#me", "foaf:knows": { "@id": "http://www.ivan-herman.net/foaf#me", "foaf:name": "Ivan Herman" } - }), - %q( + }', + ' . "Ivan Herman" . - ) + ' ], "implicit subject" => [ - %q({ + '{ "@context": {"foaf": "http://xmlns.com/foaf/0.1/"}, "@id": "http://greggkellogg.net/foaf#me", "foaf:knows": { "foaf:name": "Manu Sporny" } - }), - %q( + }', + ' _:a . _:a "Manu Sporny"^^xsd:string . - ) - ], + ' + ] }.each do |title, (js, ttl)| it title do ttl = "@prefix xsd: . #{ttl}" @@ -627,16 +629,16 @@ { "literals" => [ - %q({ + '{ "@context": {"foaf": "http://xmlns.com/foaf/0.1/"}, "@id": "http://greggkellogg.net/foaf#me", "foaf:knows": ["Manu Sporny", "Ivan Herman"] - }), - %q( + }', + ' "Manu Sporny" . "Ivan Herman" . - ) - ], + ' + ] }.each do |title, (js, ttl)| it title do ttl = "@prefix xsd: . #{ttl}" @@ -648,74 +650,74 @@ context "lists" do { "Empty" => [ - %q({ + '{ "@context": {"foaf": "http://xmlns.com/foaf/0.1/"}, "@id": "http://greggkellogg.net/foaf#me", "foaf:knows": {"@list": []} - }), - %q( + }', + ' . - ) + ' ], "single value" => [ - %q({ + '{ "@context": {"foaf": "http://xmlns.com/foaf/0.1/"}, "@id": "http://greggkellogg.net/foaf#me", "foaf:knows": {"@list": ["Manu Sporny"]} - }), - %q( + }', + ' _:a . _:a "Manu Sporny"^^xsd:string . _:a . - ) + ' ], "single value (with coercion)" => [ - %q({ + '{ "@context": { "foaf": "http://xmlns.com/foaf/0.1/", "foaf:knows": { "@container": "@list"} }, "@id": "http://greggkellogg.net/foaf#me", "foaf:knows": ["Manu Sporny"] - }), - %q( + }', + ' _:a . _:a "Manu Sporny"^^xsd:string . _:a . - ) + ' ], "multiple values" => [ - %q({ + '{ "@context": {"foaf": "http://xmlns.com/foaf/0.1/"}, "@id": "http://greggkellogg.net/foaf#me", "foaf:knows": {"@list": ["Manu Sporny", "Dave Longley"]} - }), - %q( + }', + ' _:a . _:a "Manu Sporny"^^xsd:string . _:a _:b . 
_:b "Dave Longley"^^xsd:string . _:b . - ) + ' ], "@list containing @list" => [ - %q({ + '{ "@id": "http://example/A", "http://example.com/foo": {"@list": [{"@list": ["baz"]}]} - }), - %q( + }', + ' (("baz")) . - ) + ' ], "@list containing empty @list" => [ %({ "@id": "http://example/A", "http://example.com/foo": {"@list": [{"@list": []}]} }), - %q( + ' (()) . - ) - ], + ' + ] }.each do |title, (js, ttl)| it title do ttl = "@prefix xsd: . #{ttl}" @@ -728,20 +730,20 @@ { "@id coersion" => [ - %q({ + '{ "@context": { "knows": {"@id": "http://xmlns.com/foaf/0.1/knows", "@type": "@id"} }, "@id": "http://greggkellogg.net/foaf#me", "knows": "http://www.ivan-herman.net/foaf#me" - }), - %q( + }', + ' . - ) + ' ], "datatype coersion" => [ - %q({ + '{ "@context": { "dcterms": "http://purl.org/dc/terms/", "xsd": "http://www.w3.org/2001/XMLSchema#", @@ -749,132 +751,132 @@ }, "@id": "http://greggkellogg.net/foaf#me", "created": "1957-02-27" - }), - %q( + }', + ' "1957-02-27"^^ . - ) + ' ], "sub-objects with context" => [ - %q({ + '{ "@context": {"foo": "http://example.com/foo"}, "foo": { "@context": {"foo": "http://example.org/foo"}, "foo": "bar" } - }), - %q( + }', + ' _:a _:b . _:b "bar"^^xsd:string . - ) + ' ], "contexts with a list processed in order" => [ - %q({ + '{ "@context": [ {"foo": "http://example.com/foo"}, {"foo": "http://example.org/foo"} ], "foo": "bar" - }), - %q( + }', + ' _:b "bar"^^xsd:string . - ) + ' ], "term definition resolves term as IRI" => [ - %q({ + '{ "@context": [ {"foo": "http://example.com/foo"}, {"bar": "foo"} ], "bar": "bar" - }), - %q( + }', + ' _:b "bar"^^xsd:string . - ) + ' ], "term definition resolves prefix as IRI" => [ - %q({ + '{ "@context": [ {"foo": "http://example.com/foo#"}, {"bar": "foo:bar"} ], "bar": "bar" - }), - %q( + }', + ' _:b "bar"^^xsd:string . - ) + ' ], "@language" => [ - %q({ + '{ "@context": { "foo": "http://example.com/foo#", "@language": "en" }, "foo:bar": "baz" - }), - %q( + }', + ' _:a "baz"@en . 
- ) + ' ], "@language with override" => [ - %q({ + '{ "@context": { "foo": "http://example.com/foo#", "@language": "en" }, "foo:bar": {"@value": "baz", "@language": "fr"} - }), - %q( + }', + ' _:a "baz"@fr . - ) + ' ], "@language with plain" => [ - %q({ + '{ "@context": { "foo": "http://example.com/foo#", "@language": "en" }, "foo:bar": {"@value": "baz"} - }), - %q( + }', + ' _:a "baz"^^xsd:string . - ) - ], + ' + ] }.each do |title, (js, ttl)| it title do ttl = "@prefix xsd: . #{ttl}" expect(parse(js)).to be_equivalent_graph(ttl, base: "http://example/", logger: logger, inputDocument: js) end end - + context "coercion" do context "term def with @id + @type" do { "dt with term" => [ - %q({ + '{ "@context": [ {"date": "http://www.w3.org/2001/XMLSchema#date", "term": "http://example.org/foo#"}, {"foo": {"@id": "term", "@type": "date"}} ], "foo": "bar" - }), - %q( + }', + ' @prefix xsd: . [ "bar"^^xsd:date ] . - ) + ' ], "@id with term" => [ - %q({ + '{ "@context": [ {"foo": {"@id": "http://example.org/foo#bar", "@type": "@id"}} ], "foo": "http://example.org/foo#bar" - }), - %q( + }', + ' _:a . - ) + ' ], "coercion without term definition" => [ - %q({ + '{ "@context": [ { "xsd": "http://www.w3.org/2001/XMLSchema#", @@ -885,13 +887,13 @@ } ], "dc:date": "2011-11-23" - }), - %q( + }', + ' @prefix xsd: . @prefix dc: . [ dc:date "2011-11-23"^^xsd:date] . - ) - ], + ' + ] }.each do |title, (js, ttl)| it title do ttl = "@prefix xsd: . #{ttl}" @@ -905,29 +907,29 @@ context "term def with @id + @type + @container" do { "dt with term" => [ - %q({ + '{ "@context": [ {"date": "http://www.w3.org/2001/XMLSchema#date", "term": "http://example.org/foo#"}, {"foo": {"@id": "term", "@type": "date", "@container": "@list"}} ], "foo": ["bar"] - }), - %q( + }', + ' @prefix xsd: . [ ("bar"^^xsd:date) ] . 
- ) + ' ], "@id with term" => [ - %q({ + '{ "@context": [ {"foo": {"@id": "http://example.org/foo#bar", "@type": "@id", "@container": "@list"}} ], "foo": ["http://example.org/foo#bar"] - }), - %q( + }', + ' _:a () . - ) - ], + ' + ] }.each do |title, (js, ttl)| it title do ttl = "@prefix xsd: . #{ttl}" @@ -939,7 +941,7 @@ end context "blank node predicates" do - subject {%q({"@id": "http://example/subj", "_:foo": "bar"})} + subject { '{"@id": "http://example/subj", "_:foo": "bar"}' } it "outputs statements with blank node predicates if :produceGeneralizedRdf is true" do expect do @@ -958,7 +960,7 @@ context "@included" do { - "Basic Included array": { + 'Basic Included array': { input: %({ "@context": { "@version": 1.1, @@ -974,7 +976,7 @@ [ "value2"] . ) }, - "Basic Included object": { + 'Basic Included object': { input: %({ "@context": { "@version": 1.1, @@ -990,7 +992,7 @@ [ "value2"] . ) }, - "Multiple properties mapping to @included are folded together": { + 'Multiple properties mapping to @included are folded together': { input: %({ "@context": { "@version": 1.1, @@ -1006,7 +1008,7 @@ [ "value2"] . ) }, - "Included containing @included": { + 'Included containing @included': { input: %({ "@context": { "@version": 1.1, @@ -1028,7 +1030,7 @@ [ "value3"] . ) }, - "Property value with @included": { + 'Property value with @included': { input: %({ "@context": { "@version": 1.1, @@ -1046,7 +1048,7 @@ [a ] . ) }, - "json.api example": { + 'json.api example': { input: %({ "@context": { "@version": 1.1, @@ -1167,9 +1169,9 @@ ; "dgeb" . ) - }, + } }.each do |title, params| - it(title) {run_to_rdf params} + it(title) { run_to_rdf params } end end @@ -1177,60 +1179,60 @@ { "number syntax (decimal)" => [ - %q({"@context": { "measure": "http://example/measure#"}, "measure:cups": 5.3}), - %q(_:a "5.3E0"^^ .) + '{"@context": { "measure": "http://example/measure#"}, "measure:cups": 5.3}', + '_:a "5.3E0"^^ .' 
], "number syntax (double)" => [ - %q({"@context": { "measure": "http://example/measure#"}, "measure:cups": 5.3e0}), - %q(_:a "5.3E0"^^ .) + '{"@context": { "measure": "http://example/measure#"}, "measure:cups": 5.3e0}', + '_:a "5.3E0"^^ .' ], "number syntax (integer)" => [ - %q({"@context": { "chem": "http://example/chem#"}, "chem:protons": 12}), - %q(_:a "12"^^ .) + '{"@context": { "chem": "http://example/chem#"}, "chem:protons": 12}', + '_:a "12"^^ .' ], "boolan syntax" => [ - %q({"@context": { "sensor": "http://example/sensor#"}, "sensor:active": true}), - %q(_:a "true"^^ .) + '{"@context": { "sensor": "http://example/sensor#"}, "sensor:active": true}', + '_:a "true"^^ .' ], "Array top element" => [ - %q([ + '[ {"@id": "http://example.com/#me", "@type": "http://xmlns.com/foaf/0.1/Person"}, {"@id": "http://example.com/#you", "@type": "http://xmlns.com/foaf/0.1/Person"} - ]), - %q( + ]', + ' . . - ) + ' ], "@graph with array of objects value" => [ - %q({ + '{ "@context": {"foaf": "http://xmlns.com/foaf/0.1/"}, "@graph": [ {"@id": "http://example.com/#me", "@type": "foaf:Person"}, {"@id": "http://example.com/#you", "@type": "foaf:Person"} ] - }), - %q( + }', + ' . . - ) + ' ], "XMLLiteral" => [ - %q({ + '{ "http://rdfs.org/sioc/ns#content": { "@value": "foo", "@type": "http://www.w3.org/1999/02/22-rdf-syntax-ns#XMLLiteral" } - }), - %q( + }', + ' [ "foo"^^] . - ) - ], + ' + ] }.each do |title, (js, ttl)| it title do ttl = "@prefix xsd: . #{ttl}" @@ -1242,13 +1244,13 @@ context "@direction" do context "rdfDirection: null" do { - "no language rtl": [ - %q({"http://example.org/label": {"@value": "no language", "@direction": "rtl"}}), - %q(_:a "no language" .) + 'no language rtl': [ + '{"http://example.org/label": {"@value": "no language", "@direction": "rtl"}}', + '_:a "no language" .' ], - "en-US rtl": [ - %q({"http://example.org/label": {"@value": "en-US", "@language": "en-US", "@direction": "rtl"}}), - %q(_:a "en-US"@en-us .) 
+ 'en-US rtl': [ + '{"http://example.org/label": {"@value": "en-US", "@language": "en-US", "@direction": "rtl"}}', + '_:a "en-US"@en-us .' ] }.each do |title, (js, ttl)| it title do @@ -1260,49 +1262,51 @@ context "rdfDirection: i18n-datatype" do { - "no language rtl": [ - %q({"http://example.org/label": {"@value": "no language", "@direction": "rtl"}}), - %q(_:a "no language"^^ .) + 'no language rtl': [ + '{"http://example.org/label": {"@value": "no language", "@direction": "rtl"}}', + '_:a "no language"^^ .' ], - "en-US rtl": [ - %q({"http://example.org/label": {"@value": "en-US", "@language": "en-US", "@direction": "rtl"}}), - %q(_:a "en-US"^^ .) + 'en-US rtl': [ + '{"http://example.org/label": {"@value": "en-US", "@language": "en-US", "@direction": "rtl"}}', + '_:a "en-US"^^ .' ] }.each do |title, (js, ttl)| it title do ttl = "@prefix xsd: . #{ttl}" - expect(parse(js, rdfDirection: 'i18n-datatype')).to be_equivalent_graph(ttl, logger: logger, inputDocument: js) + expect(parse(js, + rdfDirection: 'i18n-datatype')).to be_equivalent_graph(ttl, logger: logger, inputDocument: js) end end end context "rdfDirection: compound-literal" do { - "no language rtl": [ - %q({"http://example.org/label": {"@value": "no language", "@direction": "rtl"}}), - %q( + 'no language rtl': [ + '{"http://example.org/label": {"@value": "no language", "@direction": "rtl"}}', + ' @prefix rdf: . _:a [ rdf:value "no language"; rdf:direction "rtl" ] . - ) + ' ], - "en-US rtl": [ - %q({"http://example.org/label": {"@value": "en-US", "@language": "en-US", "@direction": "rtl"}}), - %q( + 'en-US rtl': [ + '{"http://example.org/label": {"@value": "en-US", "@language": "en-US", "@direction": "rtl"}}', + ' @prefix rdf: . _:a [ rdf:value "en-US"; rdf:language "en-us"; rdf:direction "rtl" ] . - ) + ' ] }.each do |title, (js, ttl)| it title do ttl = "@prefix xsd: . 
#{ttl}" - expect(parse(js, rdfDirection: 'compound-literal')).to be_equivalent_graph(ttl, logger: logger, inputDocument: js) + expect(parse(js, + rdfDirection: 'compound-literal')).to be_equivalent_graph(ttl, logger: logger, inputDocument: js) end end end @@ -1310,7 +1314,7 @@ context "JSON-LD-star" do { - "node with embedded subject without rdfstar option": { + 'node with embedded subject without rdfstar option': { input: %({ "@id": { "@id": "ex:rei", @@ -1319,13 +1323,13 @@ "ex:prop": "value2" }), exception: JSON::LD::JsonLdError::InvalidIdValue - }, + } }.each do |title, params| - it(title) {run_to_rdf params} + it(title) { run_to_rdf params } end { - "node with embedded subject having no @id": { + 'node with embedded subject having no @id': { input: %({ "@id": { "ex:prop": "value" @@ -1334,9 +1338,9 @@ }), expected: %( <<_:b0 "value">> "value2" . - ), + ) }, - "node with embedded subject having IRI @id": { + 'node with embedded subject having IRI @id': { input: %({ "@id": { "@id": "ex:rei", @@ -1346,9 +1350,9 @@ }), expected: %( << "value">> "value2" . - ), + ) }, - "node with embedded subject having BNode @id": { + 'node with embedded subject having BNode @id': { input: %({ "@id": { "@id": "_:rei", @@ -1358,9 +1362,9 @@ }), expected: %( <<_:b0 "value">> "value2" . - ), + ) }, - "node with embedded subject having a type": { + 'node with embedded subject having a type': { input: %({ "@id": { "@id": "ex:rei", @@ -1370,9 +1374,9 @@ }), expected: %( << >> "value2" . - ), + ) }, - "node with embedded subject having an IRI value": { + 'node with embedded subject having an IRI value': { input: %({ "@id": { "@id": "ex:rei", @@ -1382,9 +1386,9 @@ }), expected: %( << >> "value2" . - ), + ) }, - "node with embedded subject having an BNode value": { + 'node with embedded subject having an BNode value': { input: %({ "@id": { "@id": "ex:rei", @@ -1394,9 +1398,9 @@ }), expected: %( << _:b0>> "value2" . 
- ), + ) }, - "node with recursive embedded subject": { + 'node with recursive embedded subject': { input: %({ "@id": { "@id": { @@ -1409,9 +1413,9 @@ }), expected: %( <<<< "value3">> "value">> "value2" . - ), + ) }, - "illegal node with subject having no property": { + 'illegal node with subject having no property': { input: %({ "@id": { "@id": "ex:rei" @@ -1420,7 +1424,7 @@ }), exception: JSON::LD::JsonLdError::InvalidEmbeddedNode }, - "illegal node with subject having multiple properties": { + 'illegal node with subject having multiple properties': { input: %({ "@id": { "@id": "ex:rei", @@ -1430,7 +1434,7 @@ }), exception: JSON::LD::JsonLdError::InvalidEmbeddedNode }, - "illegal node with subject having multiple types": { + 'illegal node with subject having multiple types': { input: %({ "@id": { "@id": "ex:rei", @@ -1440,7 +1444,7 @@ }), exception: JSON::LD::JsonLdError::InvalidEmbeddedNode }, - "illegal node with subject having type and property": { + 'illegal node with subject having type and property': { input: %({ "@id": { "@id": "ex:rei", @@ -1451,7 +1455,7 @@ }), exception: JSON::LD::JsonLdError::InvalidEmbeddedNode }, - "node with embedded object": { + 'node with embedded object': { input: %({ "@id": "ex:subj", "ex:value": { @@ -1463,9 +1467,9 @@ }), expected: %( << "value">> . - ), + ) }, - "node with embedded object having properties": { + 'node with embedded object having properties': { input: %({ "@id": "ex:subj", "ex:value": { @@ -1479,9 +1483,9 @@ expected: %( << "value">> . << "value">> "value2" . - ), + ) }, - "node with recursive embedded object": { + 'node with recursive embedded object': { input: %({ "@id": "ex:subj", "ex:value": { @@ -1498,18 +1502,22 @@ expected: %( <<<< "value3">> "value">> . <<<< "value3">> "value">> "value2" . 
- ), - }, + ) + } }.each do |title, params| context(title) do - it "Generates statements" do - output_graph = RDF::Graph.new {|g| g << RDF::NTriples::Reader.new(params[:expected], rdfstar: true)} - run_to_rdf params.merge(rdfstar: true, output: output_graph) - end if params[:expected] + if params[:expected] + it "Generates statements" do + output_graph = RDF::Graph.new { |g| g << RDF::NTriples::Reader.new(params[:expected], rdfstar: true) } + run_to_rdf params.merge(rdfstar: true, output: output_graph) + end + end - it "Exception" do - run_to_rdf params.merge(rdfstar: true) - end if params[:exception] + if params[:exception] + it "Exception" do + run_to_rdf params.merge(rdfstar: true) + end + end end end end @@ -1566,7 +1574,7 @@ }), output: %(), pending: "jruby" - }, + } }.each do |title, params| it(title) do pending params[:pending] if params[:pending] == RUBY_ENGINE @@ -1578,7 +1586,7 @@ context "html" do { - "Transforms embedded JSON-LD script element": { + 'Transforms embedded JSON-LD script element': { input: %( @@ -1594,7 +1602,7 @@ ), output: %([ ( "bar")] .) }, - "Transforms first script element with extractAllScripts: false": { + 'Transforms first script element with extractAllScripts: false': { input: %( @@ -1620,7 +1628,7 @@ output: %([ ( "bar")] .), extractAllScripts: false }, - "Transforms targeted script element": { + 'Transforms targeted script element': { input: %( @@ -1648,11 +1656,11 @@ [ "bar"] . 
), base: "http://example.org/doc#second" - }, + } }.each do |title, params| it(title) do params[:input] = StringIO.new(params[:input]) - params[:input].send(:define_singleton_method, :content_type) {"text/html"} + params[:input].send(:define_singleton_method, :content_type) { "text/html" } run_to_rdf params.merge(validate: true) end end @@ -1660,23 +1668,32 @@ def parse(input, **options) graph = options[:graph] || RDF::Graph.new - options = {logger: logger, validate: true, canonicalize: false}.merge(options) - JSON::LD::API.toRdf(StringIO.new(input), rename_bnodes: false, **options) {|st| graph << st} + options = { logger: logger, validate: true, canonicalize: false }.merge(options) + JSON::LD::API.toRdf(StringIO.new(input), rename_bnodes: false, **options) { |st| graph << st } graph end def run_to_rdf(params) - input, output = params[:input], params[:output] + input = params[:input] + output = params[:output] graph = params[:graph] || RDF::Graph.new input = StringIO.new(input) if input.is_a?(String) pending params.fetch(:pending, "test implementation") unless input if params[:exception] - expect {JSON::LD::API.toRdf(input, **params)}.to raise_error(params[:exception]) + expect { JSON::LD::API.toRdf(input, **params) }.to raise_error(params[:exception]) else if params[:write] - expect{JSON::LD::API.toRdf(input, base: params[:base], logger: logger, rename_bnodes: false, **params) {|st| graph << st}}.to write(params[:write]).to(:error) + expect do + JSON::LD::API.toRdf(input, base: params[:base], logger: logger, rename_bnodes: false, **params) do |st| + graph << st + end + end.to write(params[:write]).to(:error) else - expect{JSON::LD::API.toRdf(input, base: params[:base], logger: logger, rename_bnodes: false, **params) {|st| graph << st}}.not_to write.to(:error) + expect do + JSON::LD::API.toRdf(input, base: params[:base], logger: logger, rename_bnodes: false, **params) do |st| + graph << st + end + end.not_to write.to(:error) end expect(graph).to 
be_equivalent_graph(output, logger: logger, inputDocument: input) end diff --git a/spec/writer_spec.rb b/spec/writer_spec.rb index e18222e2..3ecca30e 100644 --- a/spec/writer_spec.rb +++ b/spec/writer_spec.rb @@ -1,24 +1,25 @@ -# coding: utf-8 +# frozen_string_literal: true + require_relative 'spec_helper' require 'rdf/spec/writer' describe JSON::LD::Writer do - let(:logger) {RDF::Spec.logger} + let(:logger) { RDF::Spec.logger } - after(:each) {|example| puts logger.to_s if example.exception} + after { |example| puts logger if example.exception } it_behaves_like 'an RDF::Writer' do - let(:writer) {JSON::LD::Writer.new(StringIO.new, logger: logger)} + let(:writer) { described_class.new(StringIO.new, logger: logger) } end describe ".for" do [ :jsonld, "etc/doap.jsonld", - {file_name: 'etc/doap.jsonld'}, - {file_extension: 'jsonld'}, - {content_type: 'application/ld+json'}, - {content_type: 'application/x-ld+json'}, + { file_name: 'etc/doap.jsonld' }, + { file_extension: 'jsonld' }, + { content_type: 'application/ld+json' }, + { content_type: 'application/x-ld+json' } ].each do |arg| it "discovers with #{arg.inspect}" do expect(RDF::Reader.for(arg)).to eq JSON::LD::Reader @@ -27,26 +28,26 @@ end context "simple tests" do - it "should use full URIs without base" do + it "uses full URIs without base" do input = %( .) expect(serialize(input)).to produce_jsonld([{ - '@id' => "http://a/b", - "http://a/c" => [{"@id" => "http://a/d"}] + '@id' => "http://a/b", + "http://a/c" => [{ "@id" => "http://a/d" }] }], logger) end - it "should use qname URIs with standard prefix" do + it "uses qname URIs with standard prefix" do input = %( .) 
expect(serialize(input, standard_prefixes: true)).to produce_jsonld({ '@context' => { - "foaf" => "http://xmlns.com/foaf/0.1/", + "foaf" => "http://xmlns.com/foaf/0.1/" }, - '@id' => "foaf:b", - "foaf:c" => {"@id" => "foaf:d"} + '@id' => "foaf:b", + "foaf:c" => { "@id" => "foaf:d" } }, logger) end - it "should use qname URIs with parsed prefix" do + it "uses qname URIs with parsed prefix" do input = %( . "Rhythm Paradise"@en . @@ -54,52 +55,52 @@ "rhythm-tengoku" . ) expect(serialize(input, prefixes: { - dc: "http://purl.org/dc/terms/", - frbr: "http://vocab.org/frbr/core#", - senet: "https://senet.org/ns#", + dc: "http://purl.org/dc/terms/", + frbr: "http://vocab.org/frbr/core#", + senet: "https://senet.org/ns#" })).to produce_jsonld({ '@context' => { "dc" => "http://purl.org/dc/terms/", "frbr" => "http://vocab.org/frbr/core#", "senet" => "https://senet.org/ns#" }, - '@id' => "https://senet.org/gm", - "@type" => "frbr:Work", - "dc:title" => {"@value" => "Rhythm Paradise","@language" => "en"}, - "senet:unofficialTitle" => {"@value" => "Rhythm Tengoku","@language" => "en"}, + '@id' => "https://senet.org/gm", + "@type" => "frbr:Work", + "dc:title" => { "@value" => "Rhythm Paradise", "@language" => "en" }, + "senet:unofficialTitle" => { "@value" => "Rhythm Tengoku", "@language" => "en" }, "senet:urlkey" => "rhythm-tengoku" }, logger) end - it "should use CURIEs with empty prefix" do + it "uses CURIEs with empty prefix" do input = %( .) begin - expect(serialize(input, prefixes: { "" => RDF::Vocab::FOAF})). 
- to produce_jsonld({ - "@context" => { - "" => "http://xmlns.com/foaf/0.1/" - }, - '@id' => ":b", - ":c" => {"@id" => ":d"} - }, logger) + expect(serialize(input, prefixes: { "" => RDF::Vocab::FOAF })) + .to produce_jsonld({ + "@context" => { + "" => "http://xmlns.com/foaf/0.1/" + }, + '@id' => ":b", + ":c" => { "@id" => ":d" } + }, logger) rescue JSON::LD::JsonLdError, JSON::LD::JsonLdError, TypeError => e - fail("#{e.class}: #{e.message}\n" + - "#{logger}\n" + - "Backtrace:\n#{e.backtrace.join("\n")}") + raise("#{e.class}: #{e.message}\n" \ + "#{logger}\n" \ + "Backtrace:\n#{e.backtrace.join("\n")}") end end - - it "should not use terms if no suffix" do + + it "does not use terms if no suffix" do input = %( .) - expect(serialize(input, standard_prefixes: true)). - not_to produce_jsonld({ - "@context" => {"foaf" => "http://xmlns.com/foaf/0.1/"}, - '@id' => "foaf", - "foaf" => {"@id" => "foaf"} - }, logger) + expect(serialize(input, standard_prefixes: true)) + .not_to produce_jsonld({ + "@context" => { "foaf" => "http://xmlns.com/foaf/0.1/" }, + '@id' => "foaf", + "foaf" => { "@id" => "foaf" } + }, logger) end - - it "should not use CURIE with illegal local part" do + + it "does not use CURIE with illegal local part" do input = %( @prefix db: . @prefix dbo: . @@ -107,46 +108,47 @@ ) expect(serialize(input, prefixes: { - "db" => RDF::URI("http://dbpedia.org/resource/"), - "dbo" => RDF::URI("http://dbpedia.org/ontology/")})). 
- to produce_jsonld({ - "@context" => { - "db" => "http://dbpedia.org/resource/", - "dbo" => "http://dbpedia.org/ontology/" - }, - '@id' => "db:Michael_Jackson", - "dbo:artistOf" => {"@id" => "db:%28I_Can%27t_Make_It%29_Another_Day"} - }, logger) + "db" => RDF::URI("http://dbpedia.org/resource/"), + "dbo" => RDF::URI("http://dbpedia.org/ontology/") + })) + .to produce_jsonld({ + "@context" => { + "db" => "http://dbpedia.org/resource/", + "dbo" => "http://dbpedia.org/ontology/" + }, + '@id' => "db:Michael_Jackson", + "dbo:artistOf" => { "@id" => "db:%28I_Can%27t_Make_It%29_Another_Day" } + }, logger) end - it "should not use provided node identifiers if :unique_bnodes set" do - input = %(_:a _:b \.) + it "does not use provided node identifiers if :unique_bnodes set" do + input = %(_:a _:b .) result = serialize(input, unique_bnodes: true, context: {}) - expect(result.to_json).to match(%r(_:g\w+)) + expect(result.to_json).to match(/_:g\w+/) end it "serializes multiple subjects" do - input = %q( + input = ' @prefix : . @prefix dc: . a :TestCase . a :TestCase . - ) - expect(serialize(input, prefixes: {"" => "http://www.w3.org/2006/03/test-description#"})). - to produce_jsonld({ - '@context' => { - "" => "http://www.w3.org/2006/03/test-description#", - "dc" => RDF::Vocab::DC.to_s - }, - '@graph' => [ - {'@id' => "http://example.com/test-cases/0001", '@type' => ":TestCase"}, - {'@id' => "http://example.com/test-cases/0002", '@type' => ":TestCase"} - ] - }, logger) + ' + expect(serialize(input, prefixes: { "" => "http://www.w3.org/2006/03/test-description#" })) + .to produce_jsonld({ + '@context' => { + "" => "http://www.w3.org/2006/03/test-description#", + "dc" => RDF::Vocab::DC.to_s + }, + '@graph' => [ + { '@id' => "http://example.com/test-cases/0001", '@type' => ":TestCase" }, + { '@id' => "http://example.com/test-cases/0002", '@type' => ":TestCase" } + ] + }, logger) end it "serializes Wikia OWL example" do - input = %q( + input = ' @prefix owl: . @prefix rdf: . 
@prefix rdfs: . @@ -158,47 +160,49 @@ owl:minQualifiedCardinality "1"^^xsd:nonNegativeInteger; owl:onClass ; owl:onProperty . - ) + ' expect(serialize(input, rename_bnodes: false, prefixes: { - owl: "http://www.w3.org/2002/07/owl#", + owl: "http://www.w3.org/2002/07/owl#", rdfs: "http://www.w3.org/2000/01/rdf-schema#", - xsd: "http://www.w3.org/2001/XMLSchema#" - })). - to produce_jsonld({ - '@context' => { - "owl" => "http://www.w3.org/2002/07/owl#", - "rdf" => "http://www.w3.org/1999/02/22-rdf-syntax-ns#", - "rdfs" => "http://www.w3.org/2000/01/rdf-schema#", - "xsd" => "http://www.w3.org/2001/XMLSchema#" - }, - '@graph' => [ - { - "@id" => "_:a", - "@type" => "owl:Restriction", - "owl:minQualifiedCardinality" => {"@value" => "1","@type" => "xsd:nonNegativeInteger"}, - "owl:onClass" => {"@id" => "http://data.wikia.com/terms#Element"}, - "owl:onProperty" => {"@id" => "http://data.wikia.com/terms#characterIn"} + xsd: "http://www.w3.org/2001/XMLSchema#" + })) + .to produce_jsonld({ + '@context' => { + "owl" => "http://www.w3.org/2002/07/owl#", + "rdf" => "http://www.w3.org/1999/02/22-rdf-syntax-ns#", + "rdfs" => "http://www.w3.org/2000/01/rdf-schema#", + "xsd" => "http://www.w3.org/2001/XMLSchema#" }, - { - "@id" => "http://data.wikia.com/terms#Character", - "@type" => "owl:Class", - "rdfs:subClassOf" => {"@id" => "_:a"} - } - ] - }, logger) + '@graph' => [ + { + "@id" => "_:a", + "@type" => "owl:Restriction", + "owl:minQualifiedCardinality" => { "@value" => "1", "@type" => "xsd:nonNegativeInteger" }, + "owl:onClass" => { "@id" => "http://data.wikia.com/terms#Element" }, + "owl:onProperty" => { "@id" => "http://data.wikia.com/terms#characterIn" } + }, + { + "@id" => "http://data.wikia.com/terms#Character", + "@type" => "owl:Class", + "rdfs:subClassOf" => { "@id" => "_:a" } + } + ] + }, logger) end end context "RDF-star" do { - "subject-iii": { + 'subject-iii': { input: RDF::Statement( RDF::Statement( RDF::URI('http://example/s1'), RDF::URI('http://example/p1'), - 
RDF::URI('http://example/o1')), + RDF::URI('http://example/o1') + ), RDF::URI('http://example/p'), - RDF::URI('http://example/o')), + RDF::URI('http://example/o') + ), output: %({ "@context": {"ex": "http://example/"}, "@id": { @@ -208,14 +212,16 @@ "ex:p": {"@id": "ex:o"} }) }, - "subject-iib": { + 'subject-iib': { input: RDF::Statement( RDF::Statement( RDF::URI('http://example/s1'), RDF::URI('http://example/p1'), - RDF::Node.new('o1')), + RDF::Node.new('o1') + ), RDF::URI('http://example/p'), - RDF::URI('http://example/o')), + RDF::URI('http://example/o') + ), output: %({ "@context": {"ex": "http://example/"}, "@id": { @@ -225,14 +231,16 @@ "ex:p": {"@id": "ex:o"} }) }, - "subject-iil": { + 'subject-iil': { input: RDF::Statement( RDF::Statement( RDF::URI('http://example/s1'), RDF::URI('http://example/p1'), - RDF::Literal('o1')), + RDF::Literal('o1') + ), RDF::URI('http://example/p'), - RDF::URI('http://example/o')), + RDF::URI('http://example/o') + ), output: %({ "@context": {"ex": "http://example/"}, "@id": { @@ -242,14 +250,16 @@ "ex:p": {"@id": "ex:o"} }) }, - "subject-bii": { + 'subject-bii': { input: RDF::Statement( RDF::Statement( RDF::Node('s1'), RDF::URI('http://example/p1'), - RDF::URI('http://example/o1')), + RDF::URI('http://example/o1') + ), RDF::URI('http://example/p'), - RDF::URI('http://example/o')), + RDF::URI('http://example/o') + ), output: %({ "@context": {"ex": "http://example/"}, "@id": { @@ -259,13 +269,15 @@ "ex:p": {"@id": "ex:o"} }) }, - "subject-bib": { + 'subject-bib': { input: RDF::Statement( RDF::Statement( RDF::Node('s1'), RDF::URI('http://example/p1'), - RDF::Node.new('o1')), - RDF::URI('http://example/p'), RDF::URI('http://example/o')), + RDF::Node.new('o1') + ), + RDF::URI('http://example/p'), RDF::URI('http://example/o') + ), output: %({ "@context": {"ex": "http://example/"}, "@id": { @@ -275,14 +287,16 @@ "ex:p": {"@id": "ex:o"} }) }, - "subject-bil": { + 'subject-bil': { input: RDF::Statement( RDF::Statement( RDF::Node('s1'), 
RDF::URI('http://example/p1'), - RDF::Literal('o1')), + RDF::Literal('o1') + ), RDF::URI('http://example/p'), - RDF::URI('http://example/o')), + RDF::URI('http://example/o') + ), output: %({ "@context": {"ex": "http://example/"}, "@id": { @@ -292,14 +306,16 @@ "ex:p": {"@id": "ex:o"} }) }, - "object-iii": { + 'object-iii': { input: RDF::Statement( RDF::URI('http://example/s'), RDF::URI('http://example/p'), RDF::Statement( RDF::URI('http://example/s1'), RDF::URI('http://example/p1'), - RDF::URI('http://example/o1'))), + RDF::URI('http://example/o1') + ) + ), output: %({ "@context": {"ex": "http://example/"}, "@id": "ex:s", @@ -311,14 +327,16 @@ } }) }, - "object-iib": { + 'object-iib': { input: RDF::Statement( RDF::URI('http://example/s'), RDF::URI('http://example/p'), RDF::Statement( RDF::URI('http://example/s1'), RDF::URI('http://example/p1'), - RDF::Node.new('o1'))), + RDF::Node.new('o1') + ) + ), output: %({ "@context": {"ex": "http://example/"}, "@id": "ex:s", @@ -330,14 +348,16 @@ } }) }, - "object-iil": { + 'object-iil': { input: RDF::Statement( RDF::URI('http://example/s'), RDF::URI('http://example/p'), RDF::Statement( RDF::URI('http://example/s1'), RDF::URI('http://example/p1'), - RDF::Literal('o1'))), + RDF::Literal('o1') + ) + ), output: %({ "@context": {"ex": "http://example/"}, "@id": "ex:s", @@ -349,17 +369,20 @@ } }) }, - "recursive-subject": { + 'recursive-subject': { input: RDF::Statement( RDF::Statement( RDF::Statement( RDF::URI('http://example/s2'), RDF::URI('http://example/p2'), - RDF::URI('http://example/o2')), + RDF::URI('http://example/o2') + ), RDF::URI('http://example/p1'), - RDF::URI('http://example/o1')), + RDF::URI('http://example/o1') + ), RDF::URI('http://example/p'), - RDF::URI('http://example/o')), + RDF::URI('http://example/o') + ), output: %({ "@context": {"ex": "http://example/"}, "@id": { @@ -371,40 +394,43 @@ }, "ex:p": {"@id": "ex:o"} }) - }, + } }.each do |name, params| it name do - graph = RDF::Graph.new {|g| g << 
params[:input]} + graph = RDF::Graph.new { |g| g << params[:input] } expect( - serialize(graph, rdfstar: true, prefixes: {ex: 'http://example/'}) + serialize(graph, rdfstar: true, prefixes: { ex: 'http://example/' }) ).to produce_jsonld(JSON.parse(params[:output]), logger) end end end - context "Writes fromRdf tests to isomorphic graph" do - require 'suite_helper' - m = Fixtures::SuiteTest::Manifest.open("#{Fixtures::SuiteTest::SUITE}fromRdf-manifest.jsonld") - describe m.name do - m.entries.each do |t| - next unless t.positiveTest? && !t.property('input').include?('0016') - specify "#{t.property('@id')}: #{t.name}" do - logger.info "test: #{t.inspect}" - logger.info "source: #{t.input}" - t.logger = logger - pending "Shared list BNode in different graphs" if t.property('input').include?("fromRdf-0021") - repo = RDF::Repository.load(t.input_loc, format: :nquads) - jsonld = JSON::LD::Writer.buffer(logger: t.logger, **t.options) do |writer| - writer << repo - end + unless ENV['CI'] + context "Writes fromRdf tests to isomorphic graph" do + require 'suite_helper' + m = Fixtures::SuiteTest::Manifest.open("#{Fixtures::SuiteTest::SUITE}fromRdf-manifest.jsonld") + describe m.name do + m.entries.each do |t| + next unless t.positiveTest? 
&& !t.property('input').include?('0016') + + specify "#{t.property('@id')}: #{t.name}" do + logger.info "test: #{t.inspect}" + logger.info "source: #{t.input}" + t.logger = logger + pending "Shared list BNode in different graphs" if t.property('input').include?("fromRdf-0021") + repo = RDF::Repository.load(t.input_loc, format: :nquads) + jsonld = described_class.buffer(logger: t.logger, **t.options) do |writer| + writer << repo + end - # And then, re-generate jsonld as RDF - - expect(parse(jsonld, format: :jsonld, **t.options)).to be_equivalent_graph(repo, t) + # And then, re-generate jsonld as RDF + + expect(parse(jsonld, format: :jsonld, **t.options)).to be_equivalent_graph(repo, t) + end end end end - end unless ENV['CI'] + end def parse(input, format: :trig, **options) reader = RDF::Reader.for(format) @@ -414,14 +440,14 @@ def parse(input, format: :trig, **options) # Serialize ntstr to a string and compare against regexps def serialize(ntstr, **options) g = ntstr.is_a?(String) ? parse(ntstr, **options) : ntstr - #logger.info g.dump(:ttl) + # logger.info g.dump(:ttl) result = JSON::LD::Writer.buffer(logger: logger, **options) do |writer| writer << g end if $verbose - #puts hash.to_json + # puts hash.to_json end - + JSON.parse(result) end end