diff --git a/tools/exp/convert/convert.go b/tools/exp/convert/convert.go index 8005d77..ca5d1a4 100644 --- a/tools/exp/convert/convert.go +++ b/tools/exp/convert/convert.go @@ -7,6 +7,7 @@ import ( "github.com/cjslep/activity/tools/exp/gen" "github.com/cjslep/activity/tools/exp/rdf" "github.com/dave/jennifer/jen" + "net/url" "strings" ) @@ -22,6 +23,8 @@ type File struct { // vocabulary is a set of code generators for the vocabulary. type vocabulary struct { + Name string + URI *url.URL Values map[string]*gen.Kind FProps map[string]*gen.FunctionalPropertyGenerator NFProps map[string]*gen.NonFunctionalPropertyGenerator @@ -118,10 +121,9 @@ const ( // implementations. Developers' applications should only rely on the interfaces, // which are used internally anyway. type Converter struct { - Registry *rdf.RDFRegistry - GenRoot *gen.PackageManager - VocabularyName string - PackagePolicy PackagePolicy + Registry *rdf.RDFRegistry + GenRoot *gen.PackageManager + PackagePolicy PackagePolicy } // Convert turns a ParsedVocabulary into a set of code-generated files. @@ -169,7 +171,7 @@ func (c Converter) convertToFiles(v vocabulary) (f []*File, e error) { // Functional Properties for _, i := range v.FProps { var pm *gen.PackageManager - pm, e = c.propertyPackageManager(i) + pm, e = c.propertyPackageManager(i, v.Name) if e != nil { return } @@ -195,7 +197,7 @@ func (c Converter) convertToFiles(v vocabulary) (f []*File, e error) { // Non-Functional Properties for _, i := range v.NFProps { var pm *gen.PackageManager - pm, e = c.propertyPackageManager(i) + pm, e = c.propertyPackageManager(i, v.Name) if e != nil { return } @@ -224,7 +226,7 @@ func (c Converter) convertToFiles(v vocabulary) (f []*File, e error) { // Types for _, i := range v.Types { var pm *gen.PackageManager - pm, e = c.typePackageManager(i) + pm, e = c.typePackageManager(i, v.Name) if e != nil { return } @@ -263,7 +265,7 @@ func (c Converter) convertToFiles(v vocabulary) (f []*File, e error) { Directory: pub.WriteDir(), }) var files []*File - files, e = c.rootFiles(pub, c.VocabularyName, v) + files, e = c.rootFiles(pub, v.Name, v) if e != nil { return } @@ -291,6 +293,8 @@ func (c Converter) convertToFiles(v vocabulary) (f []*File, e error) { // but since there is no need, it isn't addressed now. func (c Converter) convertVocabulary(p *rdf.ParsedVocabulary) (v vocabulary, e error) { v = newVocabulary() + v.Name = p.Vocab.Name + v.URI = p.Vocab.URI for k, val := range p.Vocab.Values { v.Values[k] = c.convertValue(val) } @@ -354,7 +358,7 @@ func (c Converter) convertType(t rdf.VocabularyType, existingTypes map[string]*gen.TypeGenerator) (tg *gen.TypeGenerator, e error) { // Determine the gen package name var pm *gen.PackageManager - pm, e = c.typePackageManager(t) + pm, e = c.typePackageManager(t, v.Name) if e != nil { return } @@ -480,7 +484,7 @@ func (c Converter) convertFunctionalProperty(p rdf.VocabularyProperty, return } var pm *gen.PackageManager - pm, e = c.propertyPackageManager(p) + pm, e = c.propertyPackageManager(p, v.Name) if e != nil { return } @@ -515,7 +519,7 @@ func (c Converter) convertNonFunctionalProperty(p rdf.VocabularyProperty, return } var pm *gen.PackageManager - pm, e = c.propertyPackageManager(p) + pm, e = c.propertyPackageManager(p, v.Name) if e != nil { return } @@ -671,14 +675,14 @@ func (c Converter) vocabValuePackage(v rdf.VocabularyValue) gen.Package { // typePackageManager returns a package manager for an individual type. It may // be the same as other types depending on the code generation policy. 
-func (c Converter) typePackageManager(v typeNamer) (pkg *gen.PackageManager, e error) { - return c.packageManager("type_" + v.TypeName()) +func (c Converter) typePackageManager(v typeNamer, vocabName string) (pkg *gen.PackageManager, e error) { + return c.packageManager("type_"+v.TypeName(), vocabName) } // propertyPackageManager returns a package manager for an individual property. // It may be the same as other types depending on the code generation policy. -func (c Converter) propertyPackageManager(v propertyNamer) (pkg *gen.PackageManager, e error) { - return c.packageManager("property_" + v.PropertyName()) +func (c Converter) propertyPackageManager(v propertyNamer, vocabName string) (pkg *gen.PackageManager, e error) { + return c.packageManager("property_"+v.PropertyName(), vocabName) } // packageManager applies the code generation package policy and returns a @@ -690,13 +694,13 @@ func (c Converter) propertyPackageManager(v propertyNamer) (pkg *gen.PackageMana // The IndividualUnderRoot policy puts each property and each type in their own // package, and each of those packages has their own public and private // subpackages. -func (c Converter) packageManager(s string) (pkg *gen.PackageManager, e error) { +func (c Converter) packageManager(s, vocabName string) (pkg *gen.PackageManager, e error) { s = strings.ToLower(s) switch c.PackagePolicy { case FlatUnderRoot: - pkg = c.GenRoot.Sub(strings.ToLower(c.VocabularyName)) + pkg = c.GenRoot.Sub(strings.ToLower(vocabName)) case IndividualUnderRoot: - pkg = c.GenRoot.Sub(strings.ToLower(c.VocabularyName)).SubPrivate(s) + pkg = c.GenRoot.Sub(strings.ToLower(vocabName)).SubPrivate(s) default: e = fmt.Errorf("unrecognized PackagePolicy: %v", c.PackagePolicy) } @@ -750,7 +754,7 @@ func (c Converter) packageFiles(v vocabulary) (f []*File, e error) { }) // Public Package Documentation docFile := jen.NewFilePath(pub.Path()) - docFile.PackageComment(gen.VocabPackageComment(pub.Name(), c.VocabularyName)) + docFile.PackageComment(gen.VocabPackageComment(pub.Name(), v.Name)) f = append(f, &File{ F: docFile, FileName: "gen_doc.go", @@ -774,7 +778,7 @@ func (c Converter) packageFiles(v vocabulary) (f []*File, e error) { }) // Private Package Documentation privDocFile := jen.NewFilePath(priv.Path()) - privDocFile.PackageComment(gen.PrivateFlatPackageComment(priv.Name(), c.VocabularyName)) + privDocFile.PackageComment(gen.PrivateFlatPackageComment(priv.Name(), v.Name)) f = append(f, &File{ F: privDocFile, FileName: "gen_doc.go", @@ -783,7 +787,7 @@ func (c Converter) packageFiles(v vocabulary) (f []*File, e error) { case IndividualUnderRoot: for _, tg := range v.Types { var file []*File - file, e = c.typePackageFiles(tg) + file, e = c.typePackageFiles(tg, v.Name) if e != nil { return } @@ -791,7 +795,7 @@ func (c Converter) packageFiles(v vocabulary) (f []*File, e error) { } for _, pg := range v.FProps { var file []*File - file, e = c.propertyPackageFiles(&pg.PropertyGenerator) + file, e = c.propertyPackageFiles(&pg.PropertyGenerator, v.Name) if e != nil { return } @@ -799,7 +803,7 @@ func (c Converter) packageFiles(v vocabulary) (f []*File, e error) { } for _, pg := range v.NFProps { var file []*File - file, e = c.propertyPackageFiles(&pg.PropertyGenerator) + file, e = c.propertyPackageFiles(&pg.PropertyGenerator, v.Name) if e != nil { return } @@ -813,7 +817,7 @@ func (c Converter) packageFiles(v vocabulary) (f []*File, e error) { // typePackageFile creates the package-level files necessary for a type if it // is being generated in its own package. 
-func (c Converter) typePackageFiles(tg *gen.TypeGenerator) (f []*File, e error) { +func (c Converter) typePackageFiles(tg *gen.TypeGenerator, vocabName string) (f []*File, e error) { // Only need one for all types. tpg := gen.NewTypePackageGenerator() pubI := tpg.PublicDefinitions([]*gen.TypeGenerator{tg}) @@ -829,7 +833,7 @@ func (c Converter) typePackageFiles(tg *gen.TypeGenerator) (f []*File, e error) // Public Package Documentation -- this may collide, but it's all the // same content. docFile := jen.NewFilePath(pub.Path()) - docFile.PackageComment(gen.VocabPackageComment(pub.Name(), c.VocabularyName)) + docFile.PackageComment(gen.VocabPackageComment(pub.Name(), vocabName)) f = append(f, &File{ F: docFile, FileName: "gen_doc.go", @@ -864,14 +868,14 @@ func (c Converter) typePackageFiles(tg *gen.TypeGenerator) (f []*File, e error) // propertyPackageFiles creates the package-level files necessary for a property // if it is being generated in its own package. -func (c Converter) propertyPackageFiles(pg *gen.PropertyGenerator) (f []*File, e error) { +func (c Converter) propertyPackageFiles(pg *gen.PropertyGenerator, vocabName string) (f []*File, e error) { // Only need one for all types. ppg := gen.NewPropertyPackageGenerator() // Public Package Documentation -- this may collide, but it's all the // same content. pub := pg.GetPublicPackage() docFile := jen.NewFilePath(pub.Path()) - docFile.PackageComment(gen.VocabPackageComment(pub.Name(), c.VocabularyName)) + docFile.PackageComment(gen.VocabPackageComment(pub.Name(), vocabName)) f = append(f, &File{ F: docFile, FileName: "gen_doc.go", diff --git a/tools/exp/custom_spec.json b/tools/exp/custom_spec.json index 30bf8f7..9cc2bea 100644 --- a/tools/exp/custom_spec.json +++ b/tools/exp/custom_spec.json @@ -1,7 +1,7 @@ { "@context": [ { - "as": "http://www.w3.org/TR/activitystreams-vocabulary/", + "as": "http://www.w3.org/TR/activitystreams-vocabulary", "owl": "http://www.w3.org/2002/07/owl#", "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", "rdfs": "http://www.w3.org/2000/01/rdf-schema#", @@ -25,7 +25,7 @@ "url": "schema:URL" } ], - "id": "https://example.com/fake-vocabulary/", + "id": "https://example.com/fake-vocabulary", "type": "owl:Ontology", "name": "FakeVocabulary", "members": [ diff --git a/tools/exp/main.go b/tools/exp/main.go index c9a2ada..972be70 100644 --- a/tools/exp/main.go +++ b/tools/exp/main.go @@ -37,13 +37,6 @@ func init() { mustAddOntology(&rfc.RFCOntology{Package: "rfc"}) } -var ( - input = flag.String("input", "spec.json", "Input JSON-LD specification used to generate Go code.") - // TODO: Be more rigorous when applying this. Also, clear the default value I am using for convenience. - prefix = flag.String("prefix", "github.com/cjslep/activity/tools/exp/tmp", "Package prefix to use for all generated package paths. This should be the prefix in the GOPATH directory if generating in a subdirectory.") - individual = flag.Bool("individual", false, "Whether to generate types and properties in individual packages.") -) - type list []string func (l *list) String() string { @@ -56,33 +49,61 @@ func (l *list) Set(v string) error { return nil } -func main() { - flag.Parse() - // TODO: Flag validation +type CommandLineFlags struct { + specs list + prefix *string + individual *bool +} - b, err := ioutil.ReadFile(*input) - if err != nil { +func NewCommandLineFlags() *CommandLineFlags { + c := &CommandLineFlags{ + // TODO: Be more rigorous when applying this. Also, clear the default value I am using for convenience. 
+ prefix: flag.String("prefix", "github.com/cjslep/activity/tools/exp/tmp", "Package prefix to use for all generated package paths. This should be the prefix in the GOPATH directory if generating in a subdirectory."), + individual: flag.Bool("individual", false, "Whether to generate types and properties in individual packages."), + } + flag.Var(&(c.specs), "spec", "Input JSON-LD specification used to generate Go code.") + flag.Parse() + if err := c.validate(); err != nil { panic(err) } - var inputJSON map[string]interface{} - err = json.Unmarshal(b, &inputJSON) - if err != nil { - panic(err) + return c +} + +func (c *CommandLineFlags) validate() error { + if len(c.specs) == 0 { + return fmt.Errorf("specs must not be empty") } - p, err := rdf.ParseVocabulary(registry, inputJSON) + return nil +} + +func main() { + cmd := NewCommandLineFlags() + + inputJSONs := make([]rdf.JSONLD, 0, len(cmd.specs)) + for _, spec := range cmd.specs { + b, err := ioutil.ReadFile(spec) + if err != nil { + panic(err) + } + var inputJSON map[string]interface{} + err = json.Unmarshal(b, &inputJSON) + if err != nil { + panic(err) + } + inputJSONs = append(inputJSONs, inputJSON) + } + p, err := rdf.ParseVocabularies(registry, inputJSONs) if err != nil { panic(err) } policy := convert.FlatUnderRoot - if *individual { + if *cmd.individual { policy = convert.IndividualUnderRoot } - fmt.Printf("Vocab Name: %q\n", p.Vocab.Name) c := &convert.Converter{ - Registry: registry, - GenRoot: gen.NewPackageManager(*prefix, "gen"), - VocabularyName: p.Vocab.Name, - PackagePolicy: policy, + Registry: registry, + GenRoot: gen.NewPackageManager(*cmd.prefix, "gen"), + PackagePolicy: policy, } f, err := c.Convert(p) if err != nil { @@ -96,5 +117,5 @@ func main() { panic(e) } } - fmt.Printf("done\n") + fmt.Printf("Done\n") } diff --git a/tools/exp/rdf/data.go b/tools/exp/rdf/data.go index 43ccac4..5ae6584 100644 --- a/tools/exp/rdf/data.go +++ b/tools/exp/rdf/data.go @@ -22,14 +22,22 @@ type ParsedVocabulary struct { } // GetReference looks up a reference based on its URI. -func (p *ParsedVocabulary) GetReference(uri string) *Vocabulary { +func (p *ParsedVocabulary) GetReference(uri string) (*Vocabulary, error) { + httpSpec, httpsSpec, err := toHttpAndHttps(uri) + if err != nil { + return nil, err + } if p.References == nil { p.References = make(map[string]*Vocabulary, 0) } - if _, ok := p.References[uri]; !ok { + if v, ok := p.References[httpSpec]; ok { + return v, nil + } else if v, ok := p.References[httpsSpec]; ok { + return v, nil + } else { p.References[uri] = &Vocabulary{} } - return p.References[uri] + return p.References[uri], nil } // String returns a printable version of this ParsedVocabulary for debugging. 
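The reworked GetReference above treats the http and https forms of a spec URI as the same vocabulary, so a reference registered under one scheme still resolves when looked up by the other. A minimal sketch of that behavior (the URI is the ActivityStreams spec URI used elsewhere in this change; the program itself is illustrative, not part of the changeset):

package main

import (
	"fmt"

	"github.com/cjslep/activity/tools/exp/rdf"
)

func main() {
	p := &rdf.ParsedVocabulary{}
	// First lookup: neither scheme variant exists yet, so an empty Vocabulary
	// is created and keyed by the https form of the URI.
	v, err := p.GetReference("https://www.w3.org/TR/activitystreams-vocabulary")
	if err != nil {
		panic(err)
	}
	v.Name = "ActivityStreams"
	// A later lookup by the http form resolves to the same *Vocabulary,
	// because both scheme variants are checked before a new entry is made.
	same, err := p.GetReference("http://www.w3.org/TR/activitystreams-vocabulary")
	if err != nil {
		panic(err)
	}
	fmt.Println(same.Name) // ActivityStreams
}
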
diff --git a/tools/exp/rdf/ontology.go b/tools/exp/rdf/ontology.go index 1125cbe..c9b143e 100644 --- a/tools/exp/rdf/ontology.go +++ b/tools/exp/rdf/ontology.go @@ -169,7 +169,12 @@ func (l *langstring) Apply(key string, value interface{}, ctx *ParsingContext) ( if e != nil { return true, e } - e = ctx.Result.GetReference(rdfSpec).SetValue(langstringSpec, &VocabularyValue{ + var vocab *Vocabulary + vocab, e = ctx.Result.GetReference(rdfSpec) + if e != nil { + return true, e + } + e = vocab.SetValue(langstringSpec, &VocabularyValue{ Name: langstringSpec, URI: u, DefinitionType: jen.Map(jen.String()).String(), diff --git a/tools/exp/rdf/parse.go b/tools/exp/rdf/parse.go index d46871f..c332665 100644 --- a/tools/exp/rdf/parse.go +++ b/tools/exp/rdf/parse.go @@ -156,15 +156,47 @@ type RDFNode interface { Apply(key string, value interface{}, ctx *ParsingContext) (bool, error) } -// ParseVocabulary parses the specified input as an ActivityStreams context that +// ParseVocabularies parses the provided inputs in order as an ActivityStreams +// context that specifies one or more extension vocabularies. +func ParseVocabularies(registry *RDFRegistry, inputs []JSONLD) (vocabulary *ParsedVocabulary, err error) { + vocabulary = &ParsedVocabulary{ + References: make(map[string]*Vocabulary, len(inputs)-1), + } + for i, input := range inputs { + var v *ParsedVocabulary + v, err = parseVocabulary(registry, input, vocabulary) + if err != nil { + return + } + for k, v := range v.References { + vocabulary.References[k] = v + } + if i < len(inputs)-1 { + registry.reset() + err = registry.AddOntology(&ReferenceOntology{v.Vocab}) + if err != nil { + return + } + vocabulary.References[v.Vocab.URI.String()] = &v.Vocab + } else { + vocabulary.Vocab = v.Vocab + } + } + return +} + +// parseVocabulary parses the specified input as an ActivityStreams context that // specifies a Core, Extended, or Extension vocabulary. -func ParseVocabulary(registry *RDFRegistry, input JSONLD) (vocabulary *ParsedVocabulary, err error) { +func parseVocabulary(registry *RDFRegistry, input JSONLD, references *ParsedVocabulary) (vocabulary *ParsedVocabulary, err error) { var nodes []RDFNode nodes, err = parseJSONLDContext(registry, input) if err != nil { return } - vocabulary = &ParsedVocabulary{} + vocabulary = &ParsedVocabulary{References: make(map[string]*Vocabulary)} + for k, v := range references.References { + vocabulary.References[k] = v + } ctx := &ParsingContext{ Result: vocabulary, } @@ -182,6 +214,7 @@ func ParseVocabulary(registry *RDFRegistry, input JSONLD) (vocabulary *ParsedVoc if err != nil { return } + ctx.Reset() // Step 2: Populate value and referenced types. err = resolveReferences(registry, ctx) if err != nil { @@ -189,31 +222,24 @@ func ParseVocabulary(registry *RDFRegistry, input JSONLD) (vocabulary *ParsedVoc } // Step 3: Populate VocabularyType's 'Properties' and // 'WithoutProperties' fields - err = populatePropertiesOnTypes(ctx) + err = populatePropertiesOnTypes(registry, ctx) return } // populatePropertiesOnTypes populates the 'Properties' and 'WithoutProperties' // entries on a VocabularyType. 
-func populatePropertiesOnTypes(ctx *ParsingContext) error { +func populatePropertiesOnTypes(registry *RDFRegistry, ctx *ParsingContext) error { for _, p := range ctx.Result.Vocab.Properties { - if err := populatePropertyOnTypes(p, "", ctx); err != nil { + if err := populatePropertyOnTypes(registry, p, "", ctx); err != nil { return err } } - for vName, ref := range ctx.Result.References { - for _, p := range ref.Properties { - if err := populatePropertyOnTypes(p, vName, ctx); err != nil { - return err - } - } - } return nil } // populatePropertyOnTypes populates the VocabularyType's 'Properties' and // 'WithoutProperties' fields based on the 'Domain' and 'DoesNotApplyTo'. -func populatePropertyOnTypes(p VocabularyProperty, vocabName string, ctx *ParsingContext) error { +func populatePropertyOnTypes(registry *RDFRegistry, p VocabularyProperty, vocabName string, ctx *ParsingContext) error { ref := VocabularyReference{ Name: p.Name, URI: p.URI, @@ -228,13 +254,17 @@ func populatePropertyOnTypes(p VocabularyProperty, vocabName string, ctx *Parsin t.Properties = append(t.Properties, ref) ctx.Result.Vocab.Types[d.Name] = t } else { - v, ok := ctx.Result.References[d.Vocab] - if !ok { - return fmt.Errorf("cannot populate property on type for vocab %q", d.Vocab) + vocab := d.Vocab + if u, err := registry.ResolveAlias(d.Vocab); err == nil { + vocab = u + } + v, err := ctx.Result.GetReference(vocab) + if err != nil { + return err } t, ok := v.Types[d.Name] if !ok { - return fmt.Errorf("cannot populate property on type %q for vocab %q", d.Name, d.Vocab) + return fmt.Errorf("cannot populate property on type %q for vocab %q", d.Name, vocab) } t.Properties = append(t.Properties, ref) v.Types[d.Name] = t @@ -249,13 +279,17 @@ func populatePropertyOnTypes(p VocabularyProperty, vocabName string, ctx *Parsin t.WithoutProperties = append(t.WithoutProperties, ref) ctx.Result.Vocab.Types[dna.Name] = t } else { - v, ok := ctx.Result.References[dna.Vocab] - if !ok { - return fmt.Errorf("cannot populate withoutproperty on type for vocab %q", dna.Vocab) + vocab := dna.Vocab + if u, err := registry.ResolveAlias(dna.Vocab); err == nil { + vocab = u + } + v, err := ctx.Result.GetReference(vocab) + if err != nil { + return err } t, ok := v.Types[dna.Name] if !ok { - return fmt.Errorf("cannot populate withoutproperty on type %q for vocab %q", dna.Name, dna.Vocab) + return fmt.Errorf("cannot populate withoutproperty on type %q for vocab %q", dna.Name, vocab) } t.WithoutProperties = append(t.WithoutProperties, ref) v.Types[dna.Name] = t @@ -321,7 +355,10 @@ func resolveReference(reference VocabularyReference, registry *RDFRegistry, ctx if e != nil { return e } - vocab = ctx.Result.GetReference(url) + vocab, e = ctx.Result.GetReference(url) + if e != nil { + return e + } } if _, ok := vocab.Types[reference.Name]; ok { return nil diff --git a/tools/exp/rdf/rdf.go b/tools/exp/rdf/rdf.go index 0d42074..0f3fb3d 100644 --- a/tools/exp/rdf/rdf.go +++ b/tools/exp/rdf/rdf.go @@ -2,6 +2,7 @@ package rdf import ( "fmt" + "net/url" "strings" ) @@ -41,6 +42,31 @@ func SplitAlias(s string) []string { } } +// toHttpAndHttps converts a URI to both its http and https versions. +func toHttpAndHttps(s string) (http, https string, err error) { + // Trailing fragments are not preserved by url.Parse, so we + // need to do proper bookkeeping and preserve it if present. 
+ hasFragment := s[len(s)-1] == '#' + var specUri *url.URL + specUri, err = url.Parse(s) + if err != nil { + return "", "", err + } + // HTTP + httpScheme := *specUri + httpScheme.Scheme = HTTP + http = httpScheme.String() + // HTTPS + httpsScheme := *specUri + httpsScheme.Scheme = HTTPS + https = httpsScheme.String() + if hasFragment { + http += "#" + https += "#" + } + return +} + // joinAlias combines a string and prepends an RDF alias to it. func joinAlias(alias, s string) string { return fmt.Sprintf("%s%s%s", alias, ALIAS_DELIMITER, s) @@ -140,14 +166,28 @@ func (r *RDFRegistry) AddOntology(o Ontology) error { if r.ontologies == nil { r.ontologies = make(map[string]Ontology, 1) } - s := o.SpecURI() - if _, ok := r.ontologies[s]; ok { - return fmt.Errorf("ontology already registered for %q", s) + specString := o.SpecURI() + httpSpec, httpsSpec, err := toHttpAndHttps(specString) + if err != nil { + return err } - r.ontologies[s] = o + if _, ok := r.ontologies[httpSpec]; ok { + return fmt.Errorf("ontology already registered for %q", httpSpec) + } + if _, ok := r.ontologies[httpsSpec]; ok { + return fmt.Errorf("ontology already registered for %q", httpsSpec) + } + r.ontologies[httpSpec] = o + r.ontologies[httpsSpec] = o return nil } +// reset clears the registry in preparation for loading another JSONLD context. +func (r *RDFRegistry) reset() { + r.aliases = make(map[string]string) + r.aliasedNodes = make(map[string]aliasedNode) +} + // getFor gets RDFKeyers based on a context's string. // // Package public. diff --git a/tools/exp/rdf/referencing.go b/tools/exp/rdf/referencing.go new file mode 100644 index 0000000..a66ad87 --- /dev/null +++ b/tools/exp/rdf/referencing.go @@ -0,0 +1,212 @@ +package rdf + +import ( + "fmt" +) + +var ( + _ Ontology = &ReferenceOntology{} +) + +// ReferenceOntology wraps a previously-parsed spec so it can be made known to +// the registry. +type ReferenceOntology struct { + v Vocabulary +} + +// SpecURI returns the URI for this specification +func (r *ReferenceOntology) SpecURI() string { + return r.v.URI.String() +} + +// Load loads the ontology without an alias. +func (r *ReferenceOntology) Load() ([]RDFNode, error) { + return r.LoadAsAlias("") +} + +// LoadAsAlias loads the vocabulary ontology with an alias. +// +// Values cannot be loaded because their serialization and deserialization types +// are not known at runtime if not embedded in the go-fed tool. If the error is +// generated when running the tool, then file a bug so that the tool can +// properly "know" about this particular value and how to serialize and +// deserialize it properly. +func (r *ReferenceOntology) LoadAsAlias(s string) ([]RDFNode, error) { + var nodes []RDFNode + for name, t := range r.v.Types { + nodes = append(nodes, &AliasedDelegate{ + Spec: r.v.URI.String(), + Alias: s, + Name: name, + Delegate: &typeReference{t: t, vocabName: r.SpecURI()}, + }) + } + for name, p := range r.v.Properties { + nodes = append(nodes, &AliasedDelegate{ + Spec: r.v.URI.String(), + Alias: s, + Name: name, + Delegate: &propertyReference{p: p, vocabName: r.SpecURI()}, + }) + } + // Note: Values cannot be added this way as there's no way to detect + // at runtime what the correct serialization and deserialization scheme + // are for particular vocabulary values. Therefore, we omit them here + // and will emit an error. + // + // If this error is emitted, it means a code change to the tool is + // required. 
A new ontology implementation for this vocabulary needs to + // be added, and a hardcoded implementation of the value's serialization + // and deserialization functions must be created. This will then let the + // rest of the generated code properly serialize and deserialize these + // values. + if len(r.v.Values) > 0 { + return nil, fmt.Errorf("known limitation: value type definitions in a new vocabulary must be embedded in the go-fed tool to ensure that the value is properly serialized and deserialized. This tool is not intelligent enough to automatically somehow deduce what encoding is necessary for new values.") + } + return nodes, nil +} + +// LoadSpecificAsAlias loads a specific RDFNode with the given alias. +// +// Values cannot be loaded because their serialization and deserialization types +// are not known at runtime if not embedded in the go-fed tool. If the error is +// generated when running the tool, then file a bug so that the tool can +// properly "know" about this particular value and how to serialize and +// deserialize it properly. +func (r *ReferenceOntology) LoadSpecificAsAlias(alias, name string) ([]RDFNode, error) { + if t, ok := r.v.Types[name]; ok { + return []RDFNode{ + &AliasedDelegate{ + Spec: "", + Alias: "", + Name: alias, + Delegate: &typeReference{t: t, vocabName: r.SpecURI()}, + }, + }, nil + } + if p, ok := r.v.Properties[name]; ok { + return []RDFNode{ + &AliasedDelegate{ + Spec: "", + Alias: "", + Name: alias, + Delegate: &propertyReference{p: p, vocabName: r.SpecURI()}, + }, + }, nil + } + if _, ok := r.v.Values[name]; ok { + // Note: Values cannot be added this way as there's no way to detect + // at runtime what the correct serialization and deserialization scheme + // are for particular vocabulary values. Therefore, we omit them here + // and will emit an error. + // + // If this error is emitted, it means a code change to the tool is + // required. A new ontology implementation for this vocabulary needs to + // be added, and a hardcoded implementation of the value's serialization + // and deserialization functions must be created. This will then let the + // rest of the generated code properly serialize and deserialize these + // values. + return nil, fmt.Errorf("known limitation: value type definitions in a new vocabulary must be embedded in the go-fed tool to ensure that the value is properly serialized and deserialized. This tool is not intelligent enough to automatically somehow deduce what encoding is necessary for new values.") + } + return nil, fmt.Errorf("ontology (%s) cannot find %q to make alias %q", r.SpecURI(), name, alias) +} + +// LoadElement does nothing. +func (r *ReferenceOntology) LoadElement(name string, payload map[string]interface{}) ([]RDFNode, error) { + return nil, nil +} + +// GetByName returns a raw, unguarded node by name. +// +// Values cannot be loaded because their serialization and deserialization types +// are not known at runtime if not embedded in the go-fed tool. If the error is +// generated when running the tool, then file a bug so that the tool can +// properly "know" about this particular value and how to serialize and +// deserialize it properly. 
+func (r *ReferenceOntology) GetByName(name string) (RDFNode, error) { + if t, ok := r.v.Types[name]; ok { + return &typeReference{t: t, vocabName: r.SpecURI()}, nil + } + if p, ok := r.v.Properties[name]; ok { + return &propertyReference{p: p, vocabName: r.SpecURI()}, nil + } + if _, ok := r.v.Values[name]; ok { + // Note: Values cannot be added this way as there's no way to detect + // at runtime what the correct serialization and deserialization scheme + // are for particular vocabulary values. Therefore, we omit them here + // and will emit an error. + // + // If this error is emitted, it means a code change to the tool is + // required. A new ontology implementation for this vocabulary needs to + // be added, and a hardcoded implementation of the value's serialization + // and deserialization functions must be created. This will then let the + // rest of the generated code properly serialize and deserialize these + // values. + return nil, fmt.Errorf("known limitation: value type definitions in a new vocabulary must be embedded in the go-fed tool to ensure that the value is properly serialized and deserialized. This tool is not intelligent enough to automatically somehow deduce what encoding is necessary for new values.") + } + return nil, fmt.Errorf("ontology (%s) cannot find node for name %s", r.SpecURI(), name) +} + +var _ RDFNode = &typeReference{} + +// typeReference adds a VocabularyReference for a VocabularyType in another +// vocabulary. +type typeReference struct { + t VocabularyType + vocabName string +} + +// Enter returns an error. +func (*typeReference) Enter(key string, ctx *ParsingContext) (bool, error) { + return true, fmt.Errorf("typeReference cannot be entered") +} + +// Exit returns an error. +func (*typeReference) Exit(key string, ctx *ParsingContext) (bool, error) { + return true, fmt.Errorf("typeReference cannot be exited") +} + +// Apply sets a reference in the context. +func (t *typeReference) Apply(key string, value interface{}, ctx *ParsingContext) (bool, error) { + ref, ok := ctx.Current.(*VocabularyReference) + if !ok { + // May be during resolve reference phase -- nothing to do. + return true, nil + } + ref.Name = t.t.GetName() + ref.URI = t.t.URI + ref.Vocab = t.vocabName + return true, nil +} + +var _ RDFNode = &propertyReference{} + +// typeReference adds a VocabularyReference for a VocabularyProperty in another +// vocabulary. +type propertyReference struct { + p VocabularyProperty + vocabName string +} + +// Enter returns an error. +func (*propertyReference) Enter(key string, ctx *ParsingContext) (bool, error) { + return true, fmt.Errorf("propertyReference cannot be entered") +} + +// Exit returns an error. +func (*propertyReference) Exit(key string, ctx *ParsingContext) (bool, error) { + return true, fmt.Errorf("propertyReference cannot be exited") +} + +// Apply sets a reference in the context. +func (p *propertyReference) Apply(key string, value interface{}, ctx *ParsingContext) (bool, error) { + ref, ok := ctx.Current.(*VocabularyReference) + if !ok { + // May be during resolve reference phase -- nothing to do. + return true, nil + } + ref.Name = p.p.GetName() + ref.URI = p.p.URI + ref.Vocab = p.vocabName + return true, nil +} diff --git a/tools/exp/rdf/rfc/ontology.go b/tools/exp/rdf/rfc/ontology.go index efe1ccc..288e1cd 100644 --- a/tools/exp/rdf/rfc/ontology.go +++ b/tools/exp/rdf/rfc/ontology.go @@ -132,7 +132,10 @@ func (b *bcp47) Exit(key string, ctx *rdf.ParsingContext) (bool, error) { // Apply adds BCP47 as a value Kind. 
func (b *bcp47) Apply(key string, value interface{}, ctx *rdf.ParsingContext) (bool, error) { - v := ctx.Result.GetReference(rfcSpec) + v, err := ctx.Result.GetReference(rfcSpec) + if err != nil { + return true, err + } if len(v.Values[bcp47Spec].Name) == 0 { u, err := url.Parse(rfcSpec + bcp47Spec) if err != nil { @@ -216,7 +219,10 @@ func (*mime) Exit(key string, ctx *rdf.ParsingContext) (bool, error) { // Apply adds MIME as a value Kind. func (m *mime) Apply(key string, value interface{}, ctx *rdf.ParsingContext) (bool, error) { - v := ctx.Result.GetReference(rfcSpec) + v, err := ctx.Result.GetReference(rfcSpec) + if err != nil { + return true, err + } if len(v.Values[mimeSpec].Name) == 0 { u, err := url.Parse(rfcSpec + mimeSpec) if err != nil { @@ -300,7 +306,10 @@ func (*rel) Exit(key string, ctx *rdf.ParsingContext) (bool, error) { // Apply adds rel as a supported value Kind. func (r *rel) Apply(key string, value interface{}, ctx *rdf.ParsingContext) (bool, error) { - v := ctx.Result.GetReference(rfcSpec) + v, err := ctx.Result.GetReference(rfcSpec) + if err != nil { + return true, err + } if len(v.Values[relSpec].Name) == 0 { u, err := url.Parse(rfcSpec + relSpec) if err != nil { diff --git a/tools/exp/rdf/schema/ontology.go b/tools/exp/rdf/schema/ontology.go index c14cced..5dd9f80 100644 --- a/tools/exp/rdf/schema/ontology.go +++ b/tools/exp/rdf/schema/ontology.go @@ -3,6 +3,7 @@ package schema import ( "fmt" "github.com/cjslep/activity/tools/exp/rdf" + neturl "net/url" "strings" ) @@ -265,6 +266,40 @@ func (n *name) Apply(key string, value interface{}, ctx *rdf.ParsingContext) (bo } else if ns, ok := ctx.Current.(rdf.NameSetter); !ok { return true, fmt.Errorf("schema name not given NameSetter in context") } else { + var vocab string + // Parse will interpret "ActivityStreams" as a valid URL without + // a scheme. It will also interpret "as:Object" as a valid URL + // with a scheme of "as". + if u, err := neturl.Parse(s); err == nil && len(u.Scheme) > 0 && len(u.Host) > 0 { + // If the name is a URL, use heuristics to determine the + // name versus vocabulary part. + // + // The vocabulary is usually the URI without the + // fragment or final path entry. The name is usually the + // fragment or final path entry. + if len(u.Fragment) > 0 { + // Attempt to parse the fragment + s = u.Fragment + u.Fragment = "" + vocab = u.String() + } else { + // Use the final path component + comp := strings.Split(s, "/") + s = comp[len(comp)-1] + vocab = strings.Join(comp[:len(comp)-1], "/") + } + } else if sp := rdf.SplitAlias(s); len(sp) == 2 { + // The name may be aliased. + vocab = sp[0] + s = sp[1] + } // Else the name has no vocabulary reference. + if len(vocab) > 0 { + if ref, ok := ctx.Current.(*rdf.VocabularyReference); !ok { + return true, fmt.Errorf("schema name not given *rdf.VocabularyReference in context") + } else { + ref.Vocab = vocab + } + } ns.SetName(s) ctx.Name = s return true, nil diff --git a/tools/exp/rdf/xsd/ontology.go b/tools/exp/rdf/xsd/ontology.go index 6eaa004..54ea7f1 100644 --- a/tools/exp/rdf/xsd/ontology.go +++ b/tools/exp/rdf/xsd/ontology.go @@ -199,7 +199,10 @@ func (a *anyURI) Exit(key string, ctx *rdf.ParsingContext) (bool, error) { // Apply adds the anyURI value Kind to the XML namespace. 
func (a *anyURI) Apply(key string, value interface{}, ctx *rdf.ParsingContext) (bool, error) { - v := ctx.Result.GetReference(xmlSpec) + v, err := ctx.Result.GetReference(xmlSpec) + if err != nil { + return true, err + } if len(v.Values[anyURISpec].Name) == 0 { u, err := url.Parse(xmlSpec + anyURISpec) if err != nil { @@ -296,7 +299,10 @@ func (d *dateTime) Exit(key string, ctx *rdf.ParsingContext) (bool, error) { // Apply adds the xsd:dateTime value Kind to the XML namespace. func (d *dateTime) Apply(key string, value interface{}, ctx *rdf.ParsingContext) (bool, error) { - v := ctx.Result.GetReference(xmlSpec) + v, err := ctx.Result.GetReference(xmlSpec) + if err != nil { + return true, err + } if len(v.Values[dateTimeSpec].Name) == 0 { u, err := url.Parse(xmlSpec + dateTimeSpec) if err != nil { @@ -403,7 +409,10 @@ func (f *float) Exit(key string, ctx *rdf.ParsingContext) (bool, error) { // Apply adds xsd:float value Kind to the XML namespace. func (f *float) Apply(key string, value interface{}, ctx *rdf.ParsingContext) (bool, error) { - v := ctx.Result.GetReference(xmlSpec) + v, err := ctx.Result.GetReference(xmlSpec) + if err != nil { + return true, err + } if len(v.Values[floatSpec].Name) == 0 { u, err := url.Parse(xmlSpec + floatSpec) if err != nil { @@ -487,7 +496,10 @@ func (*xmlString) Exit(key string, ctx *rdf.ParsingContext) (bool, error) { // Apply adds xsd:xmlString value Kind to the XML namespace. func (s *xmlString) Apply(key string, value interface{}, ctx *rdf.ParsingContext) (bool, error) { - v := ctx.Result.GetReference(xmlSpec) + v, err := ctx.Result.GetReference(xmlSpec) + if err != nil { + return true, err + } if len(v.Values[stringSpec].Name) == 0 { u, err := url.Parse(xmlSpec + stringSpec) if err != nil { @@ -571,7 +583,10 @@ func (*boolean) Exit(key string, ctx *rdf.ParsingContext) (bool, error) { // Apply adds boolean value Kind to the XML namespace. func (b *boolean) Apply(key string, value interface{}, ctx *rdf.ParsingContext) (bool, error) { - v := ctx.Result.GetReference(xmlSpec) + v, err := ctx.Result.GetReference(xmlSpec) + if err != nil { + return true, err + } if len(v.Values[booleanSpec].Name) == 0 { u, err := url.Parse(xmlSpec + booleanSpec) if err != nil { @@ -686,7 +701,10 @@ func (*nonNegativeInteger) Exit(key string, ctx *rdf.ParsingContext) (bool, erro // Apply adds xsd:nonNegativeInteger value Kind to the XML namespace. func (n *nonNegativeInteger) Apply(key string, value interface{}, ctx *rdf.ParsingContext) (bool, error) { - v := ctx.Result.GetReference(xmlSpec) + v, err := ctx.Result.GetReference(xmlSpec) + if err != nil { + return true, err + } if len(v.Values[nonNegativeIntegerSpec].Name) == 0 { u, err := url.Parse(xmlSpec + nonNegativeIntegerSpec) if err != nil { @@ -788,7 +806,10 @@ func (*duration) Exit(key string, ctx *rdf.ParsingContext) (bool, error) { // // Avoid at all costs. func (d *duration) Apply(key string, value interface{}, ctx *rdf.ParsingContext) (bool, error) { - v := ctx.Result.GetReference(xmlSpec) + v, err := ctx.Result.GetReference(xmlSpec) + if err != nil { + return true, err + } if len(v.Values[durationSpec].Name) == 0 { u, err := url.Parse(xmlSpec + durationSpec) if err != nil { diff --git a/tools/exp/spec.json b/tools/exp/spec.json index c36a83a..4092679 100644 --- a/tools/exp/spec.json +++ b/tools/exp/spec.json @@ -30,7 +30,7 @@ } } ], - "id": "https://www.w3.org/TR/activitystreams-vocabulary/", + "id": "https://www.w3.org/TR/activitystreams-vocabulary", "type": "owl:Ontology", "name": "ActivityStreams", "sections": {
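
Taken together with the CommandLineFlags changes, the parser can now consume several vocabulary documents in one run by repeating the -spec flag (for example, -spec spec.json -spec custom_spec.json), with each earlier document acting as a reference vocabulary for the later ones. A hedged sketch of the resulting flow, assuming illustrative file paths and a hypothetical helper name (only the -spec flag and ParseVocabularies come from this change; everything else is an example):

package main

import (
	"encoding/json"
	"io/ioutil"

	"github.com/cjslep/activity/tools/exp/rdf"
)

// parseSpecs is a hypothetical helper: it reads each JSON-LD document in
// order and hands them to ParseVocabularies together.
func parseSpecs(registry *rdf.RDFRegistry, paths []string) (*rdf.ParsedVocabulary, error) {
	inputs := make([]rdf.JSONLD, 0, len(paths))
	for _, path := range paths {
		b, err := ioutil.ReadFile(path)
		if err != nil {
			return nil, err
		}
		var m map[string]interface{}
		if err := json.Unmarshal(b, &m); err != nil {
			return nil, err
		}
		inputs = append(inputs, m)
	}
	// The last document becomes the primary Vocab; each earlier one is kept in
	// References and registered as a ReferenceOntology so later documents can
	// alias and reference its types and properties (but not its value kinds,
	// which must be embedded in the tool itself).
	return rdf.ParseVocabularies(registry, inputs)
}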