/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.constraint;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.Put;

/**
 * Apply a {@link Constraint} (in traditional database terminology) to an
 * HTable. Any number of {@link Constraint Constraints} can be added to the
 * table, in any order.
 * <p>
 * A {@link Constraint} must be added to a table before the table is loaded via
 * {@link Constraints#add(HTableDescriptor, Class...)} or
 * {@link Constraints#add(HTableDescriptor,
 * org.apache.hadoop.hbase.util.Pair...)}
 * (if you want to add a configuration with the {@link Constraint}). Constraints
 * are run in the order in which they are added, and each Constraint is
 * configured before it is run (on load).
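 * <p>
 * For example, here is a sketch of wiring constraints up at table-creation
 * time. The constraint classes and the {@code admin} handle are hypothetical
 * placeholders, not part of this package:
 * <pre>
 * HTableDescriptor desc = new HTableDescriptor("myTable");
 * // run RequiredColumnConstraint first, then RowFormatConstraint, on each Put
 * Constraints.add(desc, RequiredColumnConstraint.class, RowFormatConstraint.class);
 * admin.createTable(desc);
 * </pre>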
 * <p>
 * See {@link Constraints#enableConstraint(HTableDescriptor, Class)} and
 * {@link Constraints#disableConstraint(HTableDescriptor, Class)} for
 * enabling/disabling of a given {@link Constraint} after it has been added.
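 * <p>
 * For instance, to pause a previously added constraint and later re-enable it
 * (sketch; {@code RequiredColumnConstraint} is a hypothetical implementing
 * class):
 * <pre>
 * // stop running the constraint on this table without removing it
 * Constraints.disableConstraint(desc, RequiredColumnConstraint.class);
 * // ... and later turn it back on
 * Constraints.enableConstraint(desc, RequiredColumnConstraint.class);
 * </pre>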
 * <p>
 * If a {@link Put} is invalid, the Constraint should throw a
 * {@link org.apache.hadoop.hbase.constraint.ConstraintException}, indicating
 * that the {@link Put} has failed. When this exception is thrown, no further
 * retries of the {@link Put} are attempted, nor are any other
 * {@link Constraint Constraints} run (the {@link Put} is clearly not valid).
 * Therefore, the order in which {@link Constraint Constraints} are specified
 * has performance implications.
 * <p>
 * If a {@link Constraint} throws a {@link RuntimeException} rather than
 * rejecting the {@link Put} via a
 * {@link org.apache.hadoop.hbase.constraint.ConstraintException}, the entire
 * constraint processing mechanism ({@link ConstraintProcessor}) is unloaded
 * from the table. This ensures that the region server remains functional, but
 * no more {@link Put Puts} will be checked via {@link Constraint Constraints}.
 * <p>
 * Further, {@link Constraint Constraints} should generally not be used to
 * enforce cross-table references, as doing so will cause tremendous write
 * slowdowns, though it is possible.
 * <p>
 * NOTE: Implementing classes must have a nullary (no-args) constructor.
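 * <p>
 * A minimal implementation sketch (the class name, column family, and
 * qualifier below are illustrative only; {@link BaseConstraint} supplies the
 * {@link Configurable} plumbing):
 * <pre>
 * public class RequiredColumnConstraint extends BaseConstraint {
 *   // the implicit nullary constructor satisfies the requirement above
 *   private static final byte[] FAMILY = Bytes.toBytes("meta");
 *   private static final byte[] QUALIFIER = Bytes.toBytes("owner");
 *
 *   public void check(Put p) throws ConstraintException {
 *     // reject the Put with a ConstraintException; throwing a RuntimeException
 *     // would instead unload constraint processing for the whole table
 *     if (!p.has(FAMILY, QUALIFIER)) {
 *       throw new ConstraintException("Put is missing required column meta:owner");
 *     }
 *     // returning normally accepts the Put
 *   }
 * }
 * </pre>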
 *
 * @see BaseConstraint
 * @see Constraints
 */
@InterfaceAudience.Private
public interface Constraint extends Configurable {

  /**
   * Check a {@link Put} to ensure it is valid for the table. If the
   * {@link Put} is valid, then just return from the method. Otherwise, throw
   * an {@link Exception} specifying what happened. This {@link Exception} is
   * propagated back to the client so you can see what caused the {@link Put}
   * to fail.
   * @param p {@link Put} to check
   * @throws org.apache.hadoop.hbase.constraint.ConstraintException when the
   *           {@link Put} does not match the constraint
   */
  void check(Put p) throws ConstraintException;

}