!7024 [MSLITE][Develop] add smart reply ops

Merge pull request !7024 from sunsuodong/smart_reply
pull/7024/MERGE
mindspore-ci-bot 5 years ago committed by Gitee
commit 63672fdb4d

src/ops/custom_extract_features.cc
@@ -0,0 +1,38 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "src/ops/custom_extract_features.h"
namespace mindspore {
namespace lite {
#ifdef PRIMITIVE_WRITEABLE
int CustomExtractFeatures::UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inputs) { return RET_OK; }
#else
int CustomExtractFeatures::UnPackToFlatBuilder(const schema::Primitive *primitive,
                                               flatbuffers::FlatBufferBuilder *fbb) {
  MS_ASSERT(nullptr != primitive);
  MS_ASSERT(nullptr != fbb);
  auto val_offset = schema::CreateCustomExtractFeatures(*fbb);
  auto prim_offset = schema::CreatePrimitive(*fbb, schema::PrimitiveType_CustomExtractFeatures, val_offset.o);
  fbb->Finish(prim_offset);
  return RET_OK;
}
#endif
int CustomExtractFeatures::InferShape(std::vector<Tensor *> inputs_, std::vector<Tensor *> outputs_) {
  PrimitiveC::InferShape(inputs_, outputs_);
  return RET_INFER_INVALID;
}
} // namespace lite
} // namespace mindspore
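Reviewer note: each of the five op sources added in this PR follows the same pattern as this file. Under PRIMITIVE_WRITEABLE, UnPackAttr has nothing to read because these ops carry no attributes, so it simply returns RET_OK; in the default (inference) build, UnPackToFlatBuilder re-serializes the primitive into its FlatBuffer schema form; and InferShape calls the PrimitiveC base implementation and then returns RET_INFER_INVALID, the intent apparently being to defer shape inference until execution time, when the actual input data is available. Below is a minimal, self-contained sketch of that "defer shape inference" contract; it is illustrative only, and ToyTensor, ToyOp, kInferOk, and kInferInvalid are made-up names, not MindSpore APIs.

// Toy sketch (not MindSpore code): an op whose output shape depends on runtime
// input content reports "infer invalid" at graph-build time so the runtime
// re-runs shape inference once real data is present.
#include <iostream>
#include <vector>

constexpr int kInferOk = 0;
constexpr int kInferInvalid = 1;  // shape must be resolved again at execution time

struct ToyTensor {
  std::vector<int> shape;
  bool has_data = false;  // true once the runtime has fed concrete data
};

struct ToyOp {
  int InferShape(const std::vector<ToyTensor *> &inputs, const std::vector<ToyTensor *> &outputs) {
    if (inputs.empty() || outputs.empty()) {
      return kInferInvalid;
    }
    if (!inputs[0]->has_data) {
      return kInferInvalid;  // defer: output shape depends on runtime input content
    }
    outputs[0]->shape = inputs[0]->shape;  // toy rule: copy the now-known shape
    return kInferOk;
  }
};

int main() {
  ToyTensor in;
  ToyTensor out;
  in.shape = {4, 8};
  std::vector<ToyTensor *> inputs = {&in};
  std::vector<ToyTensor *> outputs = {&out};
  ToyOp op;
  std::cout << "graph build time: " << op.InferShape(inputs, outputs) << std::endl;  // 1 (deferred)
  in.has_data = true;
  std::cout << "execution time:   " << op.InferShape(inputs, outputs) << std::endl;  // 0 (resolved)
  return 0;
}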

src/ops/custom_extract_features.h
@@ -0,0 +1,40 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef LITE_MINDSPORE_LITE_C_OPS_CUSTOM_EXTRACT_FEATURES_H_
#define LITE_MINDSPORE_LITE_C_OPS_CUSTOM_EXTRACT_FEATURES_H_
#include <vector>
#include "src/ops/primitive_c.h"
namespace mindspore {
namespace lite {
class CustomExtractFeatures : public PrimitiveC {
 public:
#ifdef PRIMITIVE_WRITEABLE
  MS_DECLARE_PARENT(CustomExtractFeatures, PrimitiveC);
  CustomExtractFeatures() = default;
  explicit CustomExtractFeatures(schema::PrimitiveT *primitive) : PrimitiveC(primitive) {}
  int UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inputs) override;
#else
  CustomExtractFeatures() = default;
  int UnPackToFlatBuilder(const schema::Primitive *primitive, flatbuffers::FlatBufferBuilder *fbb) override;
#endif
  int InferShape(std::vector<Tensor *> inputs_, std::vector<Tensor *> outputs_) override;
};
} // namespace lite
} // namespace mindspore
#endif // LITE_MINDSPORE_LITE_C_OPS_CUSTOM_EXTRACT_FEATURES_H_
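Reviewer note: the headers mirror the same compile-time split as the sources: the PRIMITIVE_WRITEABLE (converter-side) build exposes the schema::PrimitiveT constructor plus UnPackAttr, while the default (inference) build only exposes UnPackToFlatBuilder. The toy below illustrates that #ifdef-driven dual interface; it is a sketch under the assumption that PRIMITIVE_WRITEABLE distinguishes the converter build from the runtime build, and ToyOp is a made-up name.

// Toy sketch (not MindSpore code): the same class name presents a different
// interface depending on which build flavor is compiled.
#include <cstdio>

#ifdef PRIMITIVE_WRITEABLE
// Converter-side build: the op can be populated from the imported graph.
struct ToyOp {
  int UnPackAttr() {
    std::puts("writeable build: read attributes from the imported graph");
    return 0;
  }
};
#else
// Inference-side build: the op only knows how to re-emit its FlatBuffer form.
struct ToyOp {
  int UnPackToFlatBuilder() {
    std::puts("read-only build: serialize the primitive back to a FlatBuffer");
    return 0;
  }
};
#endif

int main() {
  ToyOp op;
#ifdef PRIMITIVE_WRITEABLE
  return op.UnPackAttr();
#else
  return op.UnPackToFlatBuilder();
#endif
}

Compiling the toy with -DPRIMITIVE_WRITEABLE selects the first branch, matching how the headers above switch interfaces.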

src/ops/custom_normalize.cc
@@ -0,0 +1,37 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "src/ops/custom_normalize.h"
namespace mindspore {
namespace lite {
#ifdef PRIMITIVE_WRITEABLE
int CustomNormalize::UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inputs) { return RET_OK; }
#else
int CustomNormalize::UnPackToFlatBuilder(const schema::Primitive *primitive, flatbuffers::FlatBufferBuilder *fbb) {
  MS_ASSERT(nullptr != primitive);
  MS_ASSERT(nullptr != fbb);
  auto val_offset = schema::CreateCustomNormalize(*fbb);
  auto prim_offset = schema::CreatePrimitive(*fbb, schema::PrimitiveType_CustomNormalize, val_offset.o);
  fbb->Finish(prim_offset);
  return RET_OK;
}
#endif
int CustomNormalize::InferShape(std::vector<Tensor *> inputs_, std::vector<Tensor *> outputs_) {
  PrimitiveC::InferShape(inputs_, outputs_);
  return RET_INFER_INVALID;
}
} // namespace lite
} // namespace mindspore

src/ops/custom_normalize.h
@@ -0,0 +1,40 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef LITE_MINDSPORE_LITE_C_OPS_CUSTOM_NORMALIZE_H_
#define LITE_MINDSPORE_LITE_C_OPS_CUSTOM_NORMALIZE_H_
#include <vector>
#include "src/ops/primitive_c.h"
namespace mindspore {
namespace lite {
class CustomNormalize : public PrimitiveC {
 public:
#ifdef PRIMITIVE_WRITEABLE
  MS_DECLARE_PARENT(CustomNormalize, PrimitiveC);
  CustomNormalize() = default;
  explicit CustomNormalize(schema::PrimitiveT *primitive) : PrimitiveC(primitive) {}
  int UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inputs) override;
#else
  CustomNormalize() = default;
  int UnPackToFlatBuilder(const schema::Primitive *primitive, flatbuffers::FlatBufferBuilder *fbb) override;
#endif
  int InferShape(std::vector<Tensor *> inputs_, std::vector<Tensor *> outputs_) override;
};
} // namespace lite
} // namespace mindspore
#endif // LITE_MINDSPORE_LITE_C_OPS_CUSTOM_NORMALIZE_H_

src/ops/custom_predict.cc
@@ -0,0 +1,37 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "src/ops/custom_predict.h"
namespace mindspore {
namespace lite {
#ifdef PRIMITIVE_WRITEABLE
int CustomPredict::UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inputs) { return RET_OK; }
#else
int CustomPredict::UnPackToFlatBuilder(const schema::Primitive *primitive, flatbuffers::FlatBufferBuilder *fbb) {
  MS_ASSERT(nullptr != primitive);
  MS_ASSERT(nullptr != fbb);
  auto val_offset = schema::CreateCustomPredict(*fbb);
  auto prim_offset = schema::CreatePrimitive(*fbb, schema::PrimitiveType_CustomPredict, val_offset.o);
  fbb->Finish(prim_offset);
  return RET_OK;
}
#endif
int CustomPredict::InferShape(std::vector<Tensor *> inputs_, std::vector<Tensor *> outputs_) {
  PrimitiveC::InferShape(inputs_, outputs_);
  return RET_INFER_INVALID;
}
} // namespace lite
} // namespace mindspore

src/ops/custom_predict.h
@@ -0,0 +1,40 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef LITE_MINDSPORE_LITE_C_OPS_CUSTOM_PREDICT_H_
#define LITE_MINDSPORE_LITE_C_OPS_CUSTOM_PREDICT_H_
#include <vector>
#include "src/ops/primitive_c.h"
namespace mindspore {
namespace lite {
class CustomPredict : public PrimitiveC {
 public:
#ifdef PRIMITIVE_WRITEABLE
  MS_DECLARE_PARENT(CustomPredict, PrimitiveC);
  CustomPredict() = default;
  explicit CustomPredict(schema::PrimitiveT *primitive) : PrimitiveC(primitive) {}
  int UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inputs) override;
#else
  CustomPredict() = default;
  int UnPackToFlatBuilder(const schema::Primitive *primitive, flatbuffers::FlatBufferBuilder *fbb) override;
#endif
  int InferShape(std::vector<Tensor *> inputs_, std::vector<Tensor *> outputs_) override;
};
} // namespace lite
} // namespace mindspore
#endif // LITE_MINDSPORE_LITE_C_OPS_CUSTOM_PREDICT_H_

src/ops/hashtable_lookup.cc
@@ -0,0 +1,37 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "src/ops/hashtable_lookup.h"
namespace mindspore {
namespace lite {
#ifdef PRIMITIVE_WRITEABLE
int HashtableLookup::UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inputs) { return RET_OK; }
#else
int HashtableLookup::UnPackToFlatBuilder(const schema::Primitive *primitive, flatbuffers::FlatBufferBuilder *fbb) {
  MS_ASSERT(nullptr != primitive);
  MS_ASSERT(nullptr != fbb);
  auto val_offset = schema::CreateHashtableLookup(*fbb);
  auto prim_offset = schema::CreatePrimitive(*fbb, schema::PrimitiveType_HashtableLookup, val_offset.o);
  fbb->Finish(prim_offset);
  return RET_OK;
}
#endif
int HashtableLookup::InferShape(std::vector<Tensor *> inputs_, std::vector<Tensor *> outputs_) {
  PrimitiveC::InferShape(inputs_, outputs_);
  return RET_INFER_INVALID;
}
} // namespace lite
} // namespace mindspore

src/ops/hashtable_lookup.h
@@ -0,0 +1,40 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef LITE_MINDSPORE_LITE_C_OPS_HASHTABLE_LOOKUP_H_
#define LITE_MINDSPORE_LITE_C_OPS_HASHTABLE_LOOKUP_H_
#include <vector>
#include "src/ops/primitive_c.h"
namespace mindspore {
namespace lite {
class HashtableLookup : public PrimitiveC {
 public:
#ifdef PRIMITIVE_WRITEABLE
  MS_DECLARE_PARENT(HashtableLookup, PrimitiveC);
  HashtableLookup() = default;
  explicit HashtableLookup(schema::PrimitiveT *primitive) : PrimitiveC(primitive) {}
  int UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inputs) override;
#else
  HashtableLookup() = default;
  int UnPackToFlatBuilder(const schema::Primitive *primitive, flatbuffers::FlatBufferBuilder *fbb) override;
#endif
  int InferShape(std::vector<Tensor *> inputs_, std::vector<Tensor *> outputs_) override;
};
} // namespace lite
} // namespace mindspore
#endif // LITE_MINDSPORE_LITE_C_OPS_HASHTABLE_LOOKUP_H_

src/ops/lsh_projection.cc
@@ -0,0 +1,37 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "src/ops/lsh_projection.h"
namespace mindspore {
namespace lite {
#ifdef PRIMITIVE_WRITEABLE
int LshProjection::UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inputs) { return RET_OK; }
#else
int LshProjection::UnPackToFlatBuilder(const schema::Primitive *primitive, flatbuffers::FlatBufferBuilder *fbb) {
  MS_ASSERT(nullptr != primitive);
  MS_ASSERT(nullptr != fbb);
  auto val_offset = schema::CreateLshProjection(*fbb);
  auto prim_offset = schema::CreatePrimitive(*fbb, schema::PrimitiveType_LshProjection, val_offset.o);
  fbb->Finish(prim_offset);
  return RET_OK;
}
#endif
int LshProjection::InferShape(std::vector<Tensor *> inputs_, std::vector<Tensor *> outputs_) {
  PrimitiveC::InferShape(inputs_, outputs_);
  return RET_INFER_INVALID;
}
} // namespace lite
} // namespace mindspore

src/ops/lsh_projection.h
@@ -0,0 +1,40 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef LITE_MINDSPORE_LITE_C_OPS_LSH_PROJECTION_H_
#define LITE_MINDSPORE_LITE_C_OPS_LSH_PROJECTION_H_
#include <vector>
#include "src/ops/primitive_c.h"
namespace mindspore {
namespace lite {
class LshProjection : public PrimitiveC {
 public:
#ifdef PRIMITIVE_WRITEABLE
  MS_DECLARE_PARENT(LshProjection, PrimitiveC);
  LshProjection() = default;
  explicit LshProjection(schema::PrimitiveT *primitive) : PrimitiveC(primitive) {}
  int UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inputs) override;
#else
  LshProjection() = default;
  int UnPackToFlatBuilder(const schema::Primitive *primitive, flatbuffers::FlatBufferBuilder *fbb) override;
#endif
  int InferShape(std::vector<Tensor *> inputs_, std::vector<Tensor *> outputs_) override;
};
} // namespace lite
} // namespace mindspore
#endif // LITE_MINDSPORE_LITE_C_OPS_LSH_PROJECTION_H_

src/ops/primitive_c.cc
@@ -125,6 +125,12 @@
#include "src/ops/detection_post_process.h"
#include "src/ops/dropout.h"
#include "src/ops/real_div.h"
#include "src/ops/lsh_projection.h"
#include "src/ops/hashtable_lookup.h"
#include "src/ops/skip_gram.h"
#include "src/ops/custom_predict.h"
#include "src/ops/custom_normalize.h"
#include "src/ops/custom_extract_features.h"
#ifdef PRIMITIVE_WRITEABLE
#include "tools/converter/quantizer/quantize_util.h"
#endif
@@ -674,6 +680,18 @@ PrimitiveC *PrimitiveC::Create(mindspore::schema::PrimitiveT *primitive) {
      return new Dropout(primitive);
    case schema::PrimitiveType_Neg:
      return new Neg(primitive);
    case schema::PrimitiveType_LshProjection:
      return new LshProjection(primitive);
    case schema::PrimitiveType_HashtableLookup:
      return new HashtableLookup(primitive);
    case schema::PrimitiveType_SkipGram:
      return new SkipGram(primitive);
    case schema::PrimitiveType_CustomPredict:
      return new CustomPredict(primitive);
    case schema::PrimitiveType_CustomNormalize:
      return new CustomNormalize(primitive);
    case schema::PrimitiveType_CustomExtractFeatures:
      return new CustomExtractFeatures(primitive);
#ifdef SUPPORT_TRAIN
    case schema::PrimitiveType_ActivationGrad:
@@ -930,6 +948,18 @@ PrimitiveC *PrimitiveC::Create(const schema::Primitive *primitive) {
      return NewPrimitiveC<DetectionPostProcess>(primitive);
    case schema::PrimitiveType_Dropout:
      return NewPrimitiveC<Dropout>(primitive);
    case schema::PrimitiveType_LshProjection:
      return NewPrimitiveC<LshProjection>(primitive);
    case schema::PrimitiveType_HashtableLookup:
      return NewPrimitiveC<HashtableLookup>(primitive);
    case schema::PrimitiveType_SkipGram:
      return NewPrimitiveC<SkipGram>(primitive);
    case schema::PrimitiveType_CustomPredict:
      return NewPrimitiveC<CustomPredict>(primitive);
    case schema::PrimitiveType_CustomNormalize:
      return NewPrimitiveC<CustomNormalize>(primitive);
    case schema::PrimitiveType_CustomExtractFeatures:
      return NewPrimitiveC<CustomExtractFeatures>(primitive);
#ifdef SUPPORT_TRAIN
    case schema::PrimitiveType_ActivationGrad:
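Reviewer note: these two hunks are the registration step. Every new schema::PrimitiveType_* value gets a case in both overloads of PrimitiveC::Create: the writeable overload constructs the op directly with new, while the read-only overload goes through NewPrimitiveC<T>. Leaving either case out would make the op unconstructible when a converted model containing it is loaded. The switch-based factory boils down to the toy sketch below; OpType, OpBase, NewOp, and Create are illustrative names, not the real MindSpore symbols.

// Toy sketch (not MindSpore code): a switch-based factory dispatching an op-type
// enum to concrete classes behind a shared base interface.
#include <iostream>
#include <memory>

enum class OpType { kLshProjection, kHashtableLookup, kSkipGram, kUnknown };

struct OpBase {
  virtual ~OpBase() = default;
  virtual const char *Name() const = 0;
};
struct LshProjection : OpBase {
  const char *Name() const override { return "LshProjection"; }
};
struct HashtableLookup : OpBase {
  const char *Name() const override { return "HashtableLookup"; }
};
struct SkipGram : OpBase {
  const char *Name() const override { return "SkipGram"; }
};

// Analogue of the templated helper used in the read-only branch above:
// construct a concrete op and hand it back through the common base interface.
template <typename T>
std::unique_ptr<OpBase> NewOp() {
  return std::make_unique<T>();
}

std::unique_ptr<OpBase> Create(OpType type) {
  switch (type) {
    case OpType::kLshProjection:
      return NewOp<LshProjection>();
    case OpType::kHashtableLookup:
      return NewOp<HashtableLookup>();
    case OpType::kSkipGram:
      return NewOp<SkipGram>();
    default:
      return nullptr;  // op type not registered
  }
}

int main() {
  auto op = Create(OpType::kHashtableLookup);
  std::cout << (op ? op->Name() : "unsupported") << std::endl;
  return 0;
}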
