Package org.elasticsearch.index.mapper, class ParseContext

Examples of org.elasticsearch.index.mapper.ParseContext.Document — the per-Lucene-document container that mappers fill while parsing a source document; it keeps a reference to its parent document and a path prefix so that nested objects end up in separate Lucene documents.


    @Override
    public void postParse(ParseContext context) throws IOException {
        // In the case of nested docs, let's fill nested docs with version=1 so that Lucene doesn't write a Bitset for documents
        // that don't have the field. This is consistent with the default value for efficiency.
        // start at 1: index 0 is the root document, only the extra (nested) documents need the placeholder
        for (int i = 1; i < context.docs().size(); i++) {
            final Document doc = context.docs().get(i);
            doc.add(new NumericDocValuesField(NAME, 1L));
        }
    }
View Full Code Here
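This postParse hook fills every non-root Lucene document with a placeholder version value so that doc values exist for nested documents as well. A minimal test-style sketch of what the parse produces, assuming a hypothetical docMapper whose "comments" field is mapped as nested (variable names are illustrative, not from the original test):

    // Parsing a source with two nested "comments" objects yields the root document plus
    // one Lucene document per nested object; the hook above runs over every document
    // except the root.
    ParsedDocument parsed = docMapper.parse(XContentFactory.jsonBuilder()
            .startObject()
                .startArray("comments")
                    .startObject().field("text", "first").endObject()
                    .startObject().field("text", "second").endObject()
                .endArray()
            .endObject()
            .bytes());
    assertEquals(3, parsed.docs().size()); // two nested documents plus the root document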


            if (context.docs().size() > 1) {
                final IndexableField uidField = context.rootDoc().getField(UidFieldMapper.NAME);
                assert uidField != null;
                // we need to go over the docs and add it...
                for (int i = 1; i < context.docs().size(); i++) {
                    final Document doc = context.docs().get(i);
                    doc.add(new Field(UidFieldMapper.NAME, uidField.stringValue(), Defaults.NESTED_FIELD_TYPE));
                }
            }
        }
    }
View Full Code Here
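Here the root document's _uid is propagated to every nested document, so all documents in the block can be matched by the same uid term. A hedged sketch of the resulting invariant, given a ParsedDocument named parsed for a source with nested objects (the variable name is illustrative):

    // Every document produced for the block, nested or root, carries the same _uid value.
    String rootUid = parsed.rootDoc().getField(UidFieldMapper.NAME).stringValue();
    for (ParseContext.Document doc : parsed.docs()) {
        IndexableField uid = doc.getField(UidFieldMapper.NAME);
        assertNotNull(uid);
        assertEquals(rootUid, uid.stringValue());
    }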

            if (!context.isWithinCopyTo() && copyToFields.isEmpty() == false) {
                context = context.createCopyToContext();
                for (String field : copyToFields) {
                    // In case of a hierarchy of nested documents, we need to figure out
                    // which document the field should go to
                    Document targetDoc = null;
                    for (Document doc = context.doc(); doc != null; doc = doc.getParent()) {
                        if (field.startsWith(doc.getPrefix())) {
                            targetDoc = doc;
                            break;
                        }
View Full Code Here
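The copy_to handling walks up the nested-document hierarchy until it finds the document whose path prefix matches the target field; the root document has an empty prefix, so it matches any field name. The same walk, pulled out into a hypothetical standalone helper for illustration (not part of the mapper):

    // Climb from the current, possibly nested, document towards the root until one
    // owns the copy_to target field.
    static ParseContext.Document findCopyToDoc(ParseContext.Document current, String targetField) {
        for (ParseContext.Document doc = current; doc != null; doc = doc.getParent()) {
            if (targetField.startsWith(doc.getPrefix())) {
                return doc;
            }
        }
        // not expected in practice: the root document's prefix is the empty string
        return null;
    }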

            throw new MapperParsingException("object mapping for [" + name + "] tried to parse as object, but found a concrete value");
        }

        if (nested.isNested()) {
            context = context.createNestedContext(fullPath);
            Document nestedDoc = context.doc();
            Document parentDoc = nestedDoc.getParent();
            // pre add the uid field if possible (id was already provided)
            IndexableField uidField = parentDoc.getField(UidFieldMapper.NAME);
            if (uidField != null) {
                // we don't need to add it as a full uid field in nested docs, since we don't need versioning
                // we also rely on this for UidField#loadVersion

                // this is a deeply nested field
                nestedDoc.add(new Field(UidFieldMapper.NAME, uidField.stringValue(), UidFieldMapper.Defaults.NESTED_FIELD_TYPE));
            }
            // the type of the nested doc starts with __, so we can identify that it's a nested one in filters
            // note, we don't prefix it with the type of the doc since it allows us to execute a nested query
            // across types (for example, with similar nested objects)
            nestedDoc.add(new Field(TypeFieldMapper.NAME, nestedTypePathAsString, TypeFieldMapper.Defaults.FIELD_TYPE));
        }

        ContentPath.Type origPathType = context.path().pathType();
        context.path().pathType(pathType);

        // if we are at the end of the previous object, advance
        if (token == XContentParser.Token.END_OBJECT) {
            token = parser.nextToken();
        }
        if (token == XContentParser.Token.START_OBJECT) {
            // if we are just starting an OBJECT, advance, this is the object we are parsing, we need the name first
            token = parser.nextToken();
        }

        while (token != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.START_OBJECT) {
                serializeObject(context, currentFieldName);
            } else if (token == XContentParser.Token.START_ARRAY) {
                serializeArray(context, currentFieldName);
            } else if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token == XContentParser.Token.VALUE_NULL) {
                serializeNullValue(context, currentFieldName);
            } else if (token == null) {
                throw new MapperParsingException("object mapping for [" + name + "] tried to parse as object, but got EOF, has a concrete value been provided to it?");
            } else if (token.isValue()) {
                serializeValue(context, currentFieldName, token);
            }
            token = parser.nextToken();
        }
        // restore the original path type
        context.path().pathType(origPathType);
        if (nested.isNested()) {
            Document nestedDoc = context.doc();
            Document parentDoc = nestedDoc.getParent();
            if (nested.isIncludeInParent()) {
                for (IndexableField field : nestedDoc.getFields()) {
                    if (field.name().equals(UidFieldMapper.NAME) || field.name().equals(TypeFieldMapper.NAME)) {
                        continue;
                    } else {
                        parentDoc.add(field);
                    }
                }
            }
            if (nested.isIncludeInRoot()) {
                Document rootDoc = context.rootDoc();
                // don't add the fields twice if they were already included in the parent and the parent is the root doc
                if (!nested.isIncludeInParent() || parentDoc != rootDoc) {
                    for (IndexableField field : nestedDoc.getFields()) {
                        if (field.name().equals(UidFieldMapper.NAME) || field.name().equals(TypeFieldMapper.NAME)) {
                            continue;
                        } else {
                            rootDoc.add(field);
                        }
                    }
                }
            }
        }
View Full Code Here
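The include_in_parent / include_in_root branches at the end copy the nested document's fields (everything except _uid and _type) up the hierarchy. A hedged sketch of a mapping that turns both options on, with illustrative type and field names:

    // Illustrative mapping only; the real tests build theirs elsewhere.
    XContentBuilder mapping = XContentFactory.jsonBuilder()
            .startObject().startObject("type").startObject("properties")
                .startObject("comments")
                    .field("type", "nested")
                    .field("include_in_parent", true)
                    .field("include_in_root", true)
                    .startObject("properties")
                        .startObject("text").field("type", "string").endObject()
                    .endObject()
                .endObject()
            .endObject().endObject().endObject();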

                .field("date",   "2010-01-01")
                .endObject()
                .bytes());
       
        assertEquals(1, doc.docs().size());
        Document luceneDoc = doc.docs().get(0);
       
        assertPrecisionStepEquals(NumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("long"));
        assertPrecisionStepEquals(NumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("double"));
        assertPrecisionStepEquals(NumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("date"));
    }
View Full Code Here
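assertPrecisionStepEquals is a helper defined in the test class and not shown here. A plausible minimal version (an assumption, not the actual helper), relying on the fact that mapper-built numeric fields are Lucene Field instances backed by a concrete org.apache.lucene.document.FieldType:

    // Plausible sketch: the precision step Lucene will use for a numeric field is
    // recorded on its FieldType.
    private static void assertPrecisionStepEquals(int expected, IndexableField field) {
        assertNotNull(field);
        assertEquals(expected, ((FieldType) field.fieldType()).numericPrecisionStep());
    }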

                .field("ip",     "255.255.255.255")
                .endObject()
                .bytes());
       
        assertEquals(1, doc.docs().size());
        Document luceneDoc = doc.docs().get(0);
       
        assertPrecisionStepEquals(NumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("long"));
        assertPrecisionStepEquals(NumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("double"));
        assertPrecisionStepEquals(NumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("date"));
        assertPrecisionStepEquals(NumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("ip"));
       
        assertPrecisionStepEquals(NumberFieldMapper.Defaults.PRECISION_STEP_32_BIT, luceneDoc.getField("int"));
        assertPrecisionStepEquals(NumberFieldMapper.Defaults.PRECISION_STEP_32_BIT, luceneDoc.getField("float"));
       
        assertPrecisionStepEquals(NumberFieldMapper.Defaults.PRECISION_STEP_16_BIT, luceneDoc.getField("short"));
        assertPrecisionStepEquals(NumberFieldMapper.Defaults.PRECISION_STEP_8_BIT,  luceneDoc.getField("byte"));
    }
View Full Code Here

                .field("ip",     "255.255.255.255")
                .endObject()
                .bytes());
       
        assertEquals(1, doc.docs().size());
        Document luceneDoc = doc.docs().get(0);
       
        assertPrecisionStepEquals(1, luceneDoc.getField("int"));
        assertPrecisionStepEquals(2, luceneDoc.getField("float"));
        assertPrecisionStepEquals(1, luceneDoc.getField("long"));
        assertPrecisionStepEquals(2, luceneDoc.getField("double"));
        assertPrecisionStepEquals(1, luceneDoc.getField("short"));
        assertPrecisionStepEquals(2, luceneDoc.getField("byte"));
        assertPrecisionStepEquals(1, luceneDoc.getField("date"));
        assertPrecisionStepEquals(2, luceneDoc.getField("ip"));

    }
View Full Code Here
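Unlike the previous tests, the values asserted here come from explicit settings in the mapping rather than from the per-type defaults. A hedged sketch of the kind of mapping such a test would use (abbreviated to a few fields, names illustrative):

    // Each numeric field declares its own precision_step; the values mirror the assertions above.
    XContentBuilder mapping = XContentFactory.jsonBuilder()
            .startObject().startObject("type").startObject("properties")
                .startObject("int").field("type", "integer").field("precision_step", 1).endObject()
                .startObject("float").field("type", "float").field("precision_step", 2).endObject()
                .startObject("long").field("type", "long").field("precision_step", 1).endObject()
                .startObject("double").field("type", "double").field("precision_step", 2).endObject()
            .endObject().endObject().endObject();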

                .startObject()
                .field("int", "1234")
                .field("double", "1234")
                .endObject()
                .bytes());
        final Document doc = parsedDoc.rootDoc();
        assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "int"));
        assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "double"));
    }
View Full Code Here
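SimpleStringMappingTests.docValuesType is another test helper. A plausible sketch, assuming the Lucene 5 style IndexableFieldType#docValuesType() accessor (older Lucene versions expose docValueType() and return null instead of NONE):

    // Plausible sketch: scan the Lucene fields with the given name and report the first
    // doc-values type that is actually enabled.
    static DocValuesType docValuesType(ParseContext.Document document, String fieldName) {
        for (IndexableField field : document.getFields()) {
            if (field.name().equals(fieldName) && field.fieldType().docValuesType() != DocValuesType.NONE) {
                return field.fieldType().docValuesType();
            }
        }
        return DocValuesType.NONE;
    }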

                .startObject("double_field").field("boost", 6.0).field("value", 30.0).endObject()
                .startObject("float_field").field("boost", 7.0).field("value", 40.0).endObject()
                .startObject("long_field").field("boost", 8.0).field("value", 50).endObject()
                .startObject("short_field").field("boost", 9.0).field("value", 60).endObject()
                .bytes();
        Document doc = docMapper.parse(json).rootDoc();

        IndexableField f = doc.getField("str_field");
        assertThat((double) f.boost(), closeTo(2.0, 0.001));

        f = doc.getField("int_field");
        assertThat((double) f.boost(), closeTo(3.0, 0.001));

        f = doc.getField("byte_field");
        assertThat((double) f.boost(), closeTo(4.0, 0.001));

        f = doc.getField("date_field");
        assertThat((double) f.boost(), closeTo(5.0, 0.001));

        f = doc.getField("double_field");
        assertThat((double) f.boost(), closeTo(6.0, 0.001));

        f = doc.getField("float_field");
        assertThat((double) f.boost(), closeTo(7.0, 0.001));

        f = doc.getField("long_field");
        assertThat((double) f.boost(), closeTo(8.0, 0.001));

        f = doc.getField("short_field");
        assertThat((double) f.boost(), closeTo(9.0, 0.001));
    }
View Full Code Here
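The boost values checked here come from the document source itself, using the value-and-boost object syntax: each field in the source is an object carrying an explicit boost next to its value. A hedged sketch of that syntax with illustrative values (the real builder is truncated above and is not reproduced here):

    // Illustrative source: the per-field boost supplied in the document is what ends up
    // on the corresponding Lucene field, as the assertions above verify.
    BytesReference json = XContentFactory.jsonBuilder()
            .startObject()
                .startObject("str_field").field("boost", 2.0).field("value", "some text").endObject()
                .startObject("int_field").field("boost", 3.0).field("value", 10).endObject()
            .endObject()
            .bytes();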

    @Test
    public void testSimpleAllMappers() throws Exception {
        String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mapping.json");
        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
        byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
        Document doc = docMapper.parse(new BytesArray(json)).rootDoc();
        AllField field = (AllField) doc.getField("_all");
        // One field is boosted so we should see AllTokenStream used:
        assertThat(field.tokenStream(docMapper.mappers().indexAnalyzer(), null), Matchers.instanceOf(AllTokenStream.class));
        AllEntries allEntries = field.getAllEntries();
        assertThat(allEntries.fields().size(), equalTo(3));
        assertThat(allEntries.fields().contains("address.last.location"), equalTo(true));
View Full Code Here
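The _all snippet is truncated, but the gist is that a boosted field causes the mapper to wrap the _all value in an AllTokenStream. A hedged sketch of a mapping in that spirit (type and field names are illustrative, not the contents of the actual mapping.json):

    // Illustrative mapping: _all enabled and stored, one field boosted and included in _all.
    String mapping = XContentFactory.jsonBuilder()
            .startObject().startObject("person")
                .startObject("_all").field("enabled", true).field("store", true).endObject()
                .startObject("properties")
                    .startObject("name").field("type", "string").field("boost", 2.0f)
                        .field("include_in_all", true).endObject()
                .endObject()
            .endObject().endObject()
            .string();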
